| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| example.js | 100% | (83 / 83) | 100% | (73 / 73) | 100% | (12 / 12) | 100% | (83 / 83) | |
| lib.npmtest_newrelic.js | 100% | (16 / 16) | 100% | (14 / 14) | 100% | (3 / 3) | 100% | (16 / 16) | |
| test.js | 100% | (54 / 54) | 100% | (39 / 39) | 100% | (13 / 13) | 100% | (54 / 54) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 | 2 2 2 2 2 2 2 1 2 2 2 2 1 2 2 2 2 2 1 2 1 1 1 1 1 1 1 1 1 2 1 1 1 1 2 2 3 3 3 3 1 3 3 3 1 3 1 1 1 1 1 1 1 1 1 1 1 1 6 6 1 2 1 2 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | /*
example.js
quickstart example
instruction
1. save this script as example.js
2. run the shell command:
$ npm install npmtest-newrelic && PORT=8081 node example.js
3. play with the browser-demo on http://127.0.0.1:8081
*/
/* istanbul instrument in package npmtest_newrelic */
/*jslint
bitwise: true,
browser: true,
maxerr: 8,
maxlen: 96,
node: true,
nomen: true,
regexp: true,
stupid: true
*/
(function () {
'use strict';
var local;
// run shared js-env code - pre-init
(function () {
// init local
local = {};
// init modeJs
local.modeJs = (function () {
try {
return typeof navigator.userAgent === 'string' &&
typeof document.querySelector('body') === 'object' &&
typeof XMLHttpRequest.prototype.open === 'function' &&
'browser';
} catch (errorCaughtBrowser) {
return module.exports &&
typeof process.versions.node === 'string' &&
typeof require('http').createServer === 'function' &&
'node';
}
}());
// init global
local.global = local.modeJs === 'browser'
? window
: global;
// init utility2_rollup
local = local.global.utility2_rollup || (local.modeJs === 'browser'
? local.global.utility2_npmtest_newrelic
: global.utility2_moduleExports);
// export local
local.global.local = local;
}());
switch (local.modeJs) {
// post-init
// run browser js-env code - post-init
/* istanbul ignore next */
case 'browser':
local.testRunBrowser = function (event) {
Eif (!event || (event &&
event.currentTarget &&
event.currentTarget.className &&
event.currentTarget.className.includes &&
event.currentTarget.className.includes('onreset'))) {
// reset output
Array.from(
document.querySelectorAll('body > .resettable')
).forEach(function (element) {
switch (element.tagName) {
case 'INPUT':
case 'TEXTAREA':
element.value = '';
break;
default:
element.textContent = '';
}
});
}
switch (event && event.currentTarget && event.currentTarget.id) {
case 'testRunButton1':
// show tests
Eif (document.querySelector('#testReportDiv1').style.display === 'none') {
document.querySelector('#testReportDiv1').style.display = 'block';
document.querySelector('#testRunButton1').textContent =
'hide internal test';
local.modeTest = true;
local.testRunDefault(local);
// hide tests
} else {
document.querySelector('#testReportDiv1').style.display = 'none';
document.querySelector('#testRunButton1').textContent = 'run internal test';
}
break;
// custom-case
default:
break;
}
Iif (document.querySelector('#inputTextareaEval1') && (!event || (event &&
event.currentTarget &&
event.currentTarget.className &&
event.currentTarget.className.includes &&
event.currentTarget.className.includes('oneval')))) {
// try to eval input-code
try {
/*jslint evil: true*/
eval(document.querySelector('#inputTextareaEval1').value);
} catch (errorCaught) {
console.error(errorCaught);
}
}
};
// log stderr and stdout to #outputTextareaStdout1
['error', 'log'].forEach(function (key) {
console[key + '_original'] = console[key];
console[key] = function () {
var element;
console[key + '_original'].apply(console, arguments);
element = document.querySelector('#outputTextareaStdout1');
Iif (!element) {
return;
}
// append text to #outputTextareaStdout1
element.value += Array.from(arguments).map(function (arg) {
return typeof arg === 'string'
? arg
: JSON.stringify(arg, null, 4);
}).join(' ') + '\n';
// scroll textarea to bottom
element.scrollTop = element.scrollHeight;
};
});
// init event-handling
['change', 'click', 'keyup'].forEach(function (event) {
Array.from(document.querySelectorAll('.on' + event)).forEach(function (element) {
element.addEventListener(event, local.testRunBrowser);
});
});
// run tests
local.testRunBrowser();
break;
// run node js-env code - post-init
/* istanbul ignore next */
case 'node':
// export local
module.exports = local;
// require modules
local.fs = require('fs');
local.http = require('http');
local.url = require('url');
// init assets
local.assetsDict = local.assetsDict || {};
/* jslint-ignore-begin */
local.assetsDict['/assets.index.template.html'] = '\
<!doctype html>\n\
<html lang="en">\n\
<head>\n\
<meta charset="UTF-8">\n\
<meta name="viewport" content="width=device-width, initial-scale=1">\n\
<title>{{env.npm_package_name}} (v{{env.npm_package_version}})</title>\n\
<style>\n\
/*csslint\n\
box-sizing: false,\n\
universal-selector: false\n\
*/\n\
* {\n\
box-sizing: border-box;\n\
}\n\
body {\n\
background: #dde;\n\
font-family: Arial, Helvetica, sans-serif;\n\
margin: 2rem;\n\
}\n\
body > * {\n\
margin-bottom: 1rem;\n\
}\n\
.utility2FooterDiv {\n\
margin-top: 20px;\n\
text-align: center;\n\
}\n\
</style>\n\
<style>\n\
/*csslint\n\
*/\n\
textarea {\n\
font-family: monospace;\n\
height: 10rem;\n\
width: 100%;\n\
}\n\
textarea[readonly] {\n\
background: #ddd;\n\
}\n\
</style>\n\
</head>\n\
<body>\n\
<!-- utility2-comment\n\
<div id="ajaxProgressDiv1" style="background: #d00; height: 2px; left: 0; margin: 0; padding: 0; position: fixed; top: 0; transition: background 0.5s, width 1.5s; width: 25%;"></div>\n\
utility2-comment -->\n\
<h1>\n\
<!-- utility2-comment\n\
<a\n\
{{#if env.npm_package_homepage}}\n\
href="{{env.npm_package_homepage}}"\n\
{{/if env.npm_package_homepage}}\n\
target="_blank"\n\
>\n\
utility2-comment -->\n\
{{env.npm_package_name}} (v{{env.npm_package_version}})\n\
<!-- utility2-comment\n\
</a>\n\
utility2-comment -->\n\
</h1>\n\
<h3>{{env.npm_package_description}}</h3>\n\
<!-- utility2-comment\n\
<h4><a download href="assets.app.js">download standalone app</a></h4>\n\
<button class="onclick onreset" id="testRunButton1">run internal test</button><br>\n\
<div id="testReportDiv1" style="display: none;"></div>\n\
utility2-comment -->\n\
\n\
\n\
\n\
<label>stderr and stdout</label>\n\
<textarea class="resettable" id="outputTextareaStdout1" readonly></textarea>\n\
<!-- utility2-comment\n\
{{#if isRollup}}\n\
<script src="assets.app.js"></script>\n\
{{#unless isRollup}}\n\
utility2-comment -->\n\
<script src="assets.utility2.rollup.js"></script>\n\
<script src="jsonp.utility2._stateInit?callback=window.utility2._stateInit"></script>\n\
<script src="assets.npmtest_newrelic.rollup.js"></script>\n\
<script src="assets.example.js"></script>\n\
<script src="assets.test.js"></script>\n\
<!-- utility2-comment\n\
{{/if isRollup}}\n\
utility2-comment -->\n\
<div class="utility2FooterDiv">\n\
[ this app was created with\n\
<a href="https://github.com/kaizhu256/node-utility2" target="_blank">utility2</a>\n\
]\n\
</div>\n\
</body>\n\
</html>\n\
';
/* jslint-ignore-end */
Iif (local.templateRender) {
local.assetsDict['/'] = local.templateRender(
local.assetsDict['/assets.index.template.html'],
{
env: local.objectSetDefault(local.env, {
npm_package_description: 'the greatest app in the world!',
npm_package_name: 'my-app',
npm_package_nameAlias: 'my_app',
npm_package_version: '0.0.1'
})
}
);
} else {
local.assetsDict['/'] = local.assetsDict['/assets.index.template.html']
.replace((/\{\{env\.(\w+?)\}\}/g), function (match0, match1) {
// jslint-hack
String(match0);
switch (match1) {
case 'npm_package_description':
return 'the greatest app in the world!';
case 'npm_package_name':
return 'my-app';
case 'npm_package_nameAlias':
return 'my_app';
case 'npm_package_version':
return '0.0.1';
}
});
}
// run the cli
Eif (local.global.utility2_rollup || module !== require.main) {
break;
}
local.assetsDict['/assets.example.js'] =
local.assetsDict['/assets.example.js'] ||
local.fs.readFileSync(__filename, 'utf8');
// bug-workaround - long $npm_package_buildCustomOrg
/* jslint-ignore-begin */
local.assetsDict['/assets.npmtest_newrelic.rollup.js'] =
local.assetsDict['/assets.npmtest_newrelic.rollup.js'] ||
local.fs.readFileSync(
local.npmtest_newrelic.__dirname + '/lib.npmtest_newrelic.js',
'utf8'
).replace((/^#!/), '//');
/* jslint-ignore-end */
local.assetsDict['/favicon.ico'] = local.assetsDict['/favicon.ico'] || '';
// if $npm_config_timeout_exit exists,
// then exit this process after $npm_config_timeout_exit ms
if (Number(process.env.npm_config_timeout_exit)) {
setTimeout(process.exit, Number(process.env.npm_config_timeout_exit));
}
// start server
if (local.global.utility2_serverHttp1) {
break;
}
process.env.PORT = process.env.PORT || '8081';
console.error('server starting on port ' + process.env.PORT);
local.http.createServer(function (request, response) {
request.urlParsed = local.url.parse(request.url);
if (local.assetsDict[request.urlParsed.pathname] !== undefined) {
response.end(local.assetsDict[request.urlParsed.pathname]);
return;
}
response.statusCode = 404;
response.end();
}).listen(process.env.PORT);
break;
}
}());
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 | 2 2 2 2 2 2 2 1 2 2 2 2 1 1 1 1 | /* istanbul instrument in package npmtest_newrelic */
/*jslint
bitwise: true,
browser: true,
maxerr: 8,
maxlen: 96,
node: true,
nomen: true,
regexp: true,
stupid: true
*/
(function () {
'use strict';
var local;
// run shared js-env code - pre-init
(function () {
// init local
local = {};
// init modeJs
local.modeJs = (function () {
try {
return typeof navigator.userAgent === 'string' &&
typeof document.querySelector('body') === 'object' &&
typeof XMLHttpRequest.prototype.open === 'function' &&
'browser';
} catch (errorCaughtBrowser) {
return module.exports &&
typeof process.versions.node === 'string' &&
typeof require('http').createServer === 'function' &&
'node';
}
}());
// init global
local.global = local.modeJs === 'browser'
? window
: global;
// init utility2_rollup
local = local.global.utility2_rollup || local;
// init lib
local.local = local.npmtest_newrelic = local;
// init exports
if (local.modeJs === 'browser') {
local.global.utility2_npmtest_newrelic = local;
} else {
module.exports = local;
module.exports.__dirname = __dirname;
module.exports.module = module;
}
}());
}());
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 | 2 2 2 2 2 2 2 1 2 2 1 1 1 1 2 2 2 2 1 1 2 2 2 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 1 2 2 1 2 2 1 2 2 1 1 1 1 1 | /* istanbul instrument in package npmtest_newrelic */
/*jslint
bitwise: true,
browser: true,
maxerr: 8,
maxlen: 96,
node: true,
nomen: true,
regexp: true,
stupid: true
*/
(function () {
'use strict';
var local;
// run shared js-env code - pre-init
(function () {
// init local
local = {};
// init modeJs
local.modeJs = (function () {
try {
return typeof navigator.userAgent === 'string' &&
typeof document.querySelector('body') === 'object' &&
typeof XMLHttpRequest.prototype.open === 'function' &&
'browser';
} catch (errorCaughtBrowser) {
return module.exports &&
typeof process.versions.node === 'string' &&
typeof require('http').createServer === 'function' &&
'node';
}
}());
// init global
local.global = local.modeJs === 'browser'
? window
: global;
switch (local.modeJs) {
// re-init local from window.local
case 'browser':
local = local.global.utility2.objectSetDefault(
local.global.utility2_rollup || local.global.local,
local.global.utility2
);
break;
// re-init local from example.js
case 'node':
local = (local.global.utility2_rollup || require('utility2'))
.requireReadme();
break;
}
// export local
local.global.local = local;
}());
// run shared js-env code - function
(function () {
return;
}());
switch (local.modeJs) {
// run browser js-env code - function
case 'browser':
break;
// run node js-env code - function
case 'node':
break;
}
// run shared js-env code - post-init
(function () {
return;
}());
switch (local.modeJs) {
// run browser js-env code - post-init
case 'browser':
local.testCase_browser_nullCase = local.testCase_browser_nullCase || function (
options,
onError
) {
/*
* this function will test browser's null-case handling-behavior
*/
onError(null, options);
};
// run tests
local.nop(local.modeTest &&
document.querySelector('#testRunButton1') &&
document.querySelector('#testRunButton1').click());
break;
// run node js-env code - post-init
/* istanbul ignore next */
case 'node':
local.testCase_buildApidoc_default = local.testCase_buildApidoc_default || function (
options,
onError
) {
/*
* this function will test buildApidoc's default handling-behavior
*/
options = { modulePathList: module.paths };
local.buildApidoc(options, onError);
};
local.testCase_buildApp_default = local.testCase_buildApp_default || function (
options,
onError
) {
/*
* this function will test buildApp's default handling-behavior
*/
local.testCase_buildReadme_default(options, local.onErrorThrow);
local.testCase_buildLib_default(options, local.onErrorThrow);
local.testCase_buildTest_default(options, local.onErrorThrow);
local.testCase_buildCustomOrg_default(options, local.onErrorThrow);
options = [];
local.buildApp(options, onError);
};
local.testCase_buildCustomOrg_default = local.testCase_buildCustomOrg_default ||
function (options, onError) {
/*
* this function will test buildCustomOrg's default handling-behavior
*/
options = {};
local.buildCustomOrg(options, onError);
};
local.testCase_buildLib_default = local.testCase_buildLib_default || function (
options,
onError
) {
/*
* this function will test buildLib's default handling-behavior
*/
options = {};
local.buildLib(options, onError);
};
local.testCase_buildReadme_default = local.testCase_buildReadme_default || function (
options,
onError
) {
/*
* this function will test buildReadme's default handling-behavior
*/
options = {};
local.buildReadme(options, onError);
};
local.testCase_buildTest_default = local.testCase_buildTest_default || function (
options,
onError
) {
/*
* this function will test buildTest's default handling-behavior
*/
options = {};
local.buildTest(options, onError);
};
local.testCase_webpage_default = local.testCase_webpage_default || function (
options,
onError
) {
/*
* this function will test webpage's default handling-behavior
*/
options = { modeCoverageMerge: true, url: local.serverLocalHost + '?modeTest=1' };
local.browserTest(options, onError);
};
// run test-server
local.testRunServer(local);
break;
}
}());
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| api.js | 10.61% | (35 / 330) | 0% | (0 / 181) | 0% | (0 / 27) | 10.94% | (35 / 320) | |
| index.js | 45.28% | (24 / 53) | 31.25% | (5 / 16) | 50% | (1 / 2) | 45.28% | (24 / 53) | |
| newrelic.js | 100% | (1 / 1) | 100% | (0 / 0) | 100% | (0 / 0) | 100% | (1 / 1) | |
| stub_api.js | 58.33% | (21 / 36) | 0% | (0 / 6) | 28.57% | (2 / 7) | 58.33% | (21 / 36) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var util = require('util')
var logger = require('./lib/logger').child({component: 'api'})
var NAMES = require('./lib/metrics/names')
var recordWeb = require('./lib/metrics/recorders/http.js')
var recordBackground = require('./lib/metrics/recorders/other.js')
var customRecorder = require('./lib/metrics/recorders/custom')
var hashes = require('./lib/util/hashes')
var stringify = require('json-stringify-safe')
/*
*
* CONSTANTS
*
*/
var RUM_STUB = "<script type='text/javascript'>window.NREUM||(NREUM={});" +
"NREUM.info = %s; %s</script>"
// these messages are used in the _gracefail() method below in getBrowserTimingHeader
var RUM_ISSUES = [
'NREUM: no browser monitoring headers generated; disabled',
'NREUM: transaction missing while generating browser monitoring headers',
'NREUM: config.browser_monitoring missing, something is probably wrong',
'NREUM: browser_monitoring headers need a transaction name',
'NREUM: browser_monitoring requires valid application_id',
'NREUM: browser_monitoring requires valid browser_key',
'NREUM: browser_monitoring requires js_agent_loader script',
'NREUM: browser_monitoring disabled by browser_monitoring.loader config'
]
// can't overwrite internal parameters or all heck will break loose
var CUSTOM_BLACKLIST = [
'nr_flatten_leading'
]
var CUSTOM_EVENT_TYPE_REGEX = /^[a-zA-Z0-9:_ ]+$/
/**
* The exported New Relic API. This contains all of the functions meant to be
* used by New Relic customers. For now, that means transaction naming.
*/
function API(agent) {
this.agent = agent
}
/**
* Give the current transaction a custom name. Overrides any New Relic naming
* rules set in configuration or from New Relic's servers.
*
* IMPORTANT: this function must be called when a transaction is active. New
* Relic transactions are tied to web requests, so this method may be called
* from within HTTP or HTTPS listener functions, Express routes, or other
* contexts where a web request or response object are in scope.
*
* @param {string} name The name you want to give the web request in the New
* Relic UI. Will be prefixed with 'Custom/' when sent.
*/
API.prototype.setTransactionName = function setTransactionName(name) {
var metric = this.agent.metrics.getOrCreateMetric(
NAMES.SUPPORTABILITY.API + '/setTransactionName'
)
metric.incrementCallCount()
var transaction = this.agent.tracer.getTransaction()
if (!transaction) {
return logger.warn("No transaction found when setting name to '%s'.", name)
}
if (!name) {
if (transaction && transaction.url) {
logger.error("Must include name in setTransactionName call for URL %s.",
transaction.url)
} else {
logger.error("Must include name in setTransactionName call.")
}
return
}
transaction.forceName = NAMES.CUSTOM + '/' + name
}
/**
* Give the current transaction a name based on your own idea of what
* constitutes a controller in your Node application. Also allows you to
* optionally specify the action being invoked on the controller. If the action
* is omitted, then the API will default to using the HTTP method used in the
* request (e.g. GET, POST, DELETE). Overrides any New Relic naming rules set
* in configuration or from New Relic's servers.
*
* IMPORTANT: this function must be called when a transaction is active. New
* Relic transactions are tied to web requests, so this method may be called
* from within HTTP or HTTPS listener functions, Express routes, or other
* contexts where a web request or response object are in scope.
*
* @param {string} name The name you want to give the controller in the New
* Relic UI. Will be prefixed with 'Controller/' when
* sent.
* @param {string} action The action being invoked on the controller. Defaults
* to the HTTP method used for the request.
*/
API.prototype.setControllerName = function setControllerName(name, action) {
var metric = this.agent.metrics.getOrCreateMetric(
NAMES.SUPPORTABILITY.API + '/setControllerName'
)
metric.incrementCallCount()
var transaction = this.agent.tracer.getTransaction()
if (!transaction) {
return logger.warn("No transaction found when setting controller to %s.", name)
}
if (!name) {
if (transaction && transaction.url) {
logger.error("Must include name in setControllerName call for URL %s.",
transaction.url)
} else {
logger.error("Must include name in setControllerName call.")
}
return
}
action = action || transaction.verb || 'GET'
transaction.forceName = NAMES.CONTROLLER + '/' + name + '/' + action
}
/**
* Add a custom parameter to the current transaction. Some parameters are
* reserved (see CUSTOM_BLACKLIST for the current, very short list), and
* as with most API methods, this must be called in the context of an
* active transaction. Most recently set value wins.
*
* @param {string} name The name you want displayed in the RPM UI.
* @param {string} value The value you want displayed. Must be serializable.
*/
API.prototype.addCustomParameter = function addCustomParameter(name, value) {
var metric = this.agent.metrics.getOrCreateMetric(
NAMES.SUPPORTABILITY.API + '/addCustomParameter'
)
metric.incrementCallCount()
// If high security mode is on, custom params are disabled.
if (this.agent.config.high_security === true) {
logger.warnOnce(
"Custom params",
"Custom parameters are disabled by high security mode."
)
return false
}
var ignored = this.agent.config.ignored_params || []
var transaction = this.agent.tracer.getTransaction()
if (!transaction) {
return logger.warn("No transaction found for custom parameters.")
}
var trace = transaction.trace
if (!trace.custom) {
return logger.warn(
"Couldn't add parameter %s to nonexistent custom parameters.",
name
)
}
if (CUSTOM_BLACKLIST.indexOf(name) !== -1) {
return logger.warn("Not overwriting value of NR-only parameter %s.", name)
}
if (ignored.indexOf(name) !== -1) {
return logger.warn("Not setting ignored parameter name %s.", name)
}
if (name in trace.custom) {
logger.debug(
"Changing custom parameter %s from %s to %s.",
name,
trace.custom[name],
value
)
}
trace.custom[name] = value
}
/**
* Adds all custom parameters in an object to the current transaction.
*
* See documentation for newrelic.addCustomParameter for more information on
* setting custom parameters.
*
* An example of setting a custom parameter object:
*
* newrelic.addCustomParameters({test: 'value', test2: 'value2'});
*
* @param {object} [params]
* @param {string} [params.KEY] The name you want displayed in the RPM UI.
* @param {string} [params.KEY.VALUE] The value you want displayed. Must be serializable.
*/
API.prototype.addCustomParameters = function addCustomParameters(params) {
var metric = this.agent.metrics.getOrCreateMetric(
NAMES.SUPPORTABILITY.API + '/addCustomParameters'
)
metric.incrementCallCount()
for (var key in params) {
if (!params.hasOwnProperty(key)) {
continue
}
this.addCustomParameter(key, params[key])
}
}
/**
* Tell the tracer whether to ignore the current transaction. The most common
* use for this will be to mark a transaction as ignored (maybe it's handling
* a websocket polling channel, or maybe it's an external call you don't care
* is slow), but it's also useful when you want a transaction that would
* otherwise be ignored due to URL or transaction name normalization rules
* to *not* be ignored.
*
* @param {boolean} ignored Ignore, or don't ignore, the current transaction.
*/
API.prototype.setIgnoreTransaction = function setIgnoreTransaction(ignored) {
var metric = this.agent.metrics.getOrCreateMetric(
NAMES.SUPPORTABILITY.API + '/setIgnoreTransaction'
)
metric.incrementCallCount()
var transaction = this.agent.tracer.getTransaction()
if (!transaction) {
return logger.warn("No transaction found to ignore.")
}
transaction.forceIgnore = ignored
}
/**
* Send errors to New Relic that you've already handled yourself. Should be an
* `Error` or one of its subtypes, but the API will handle strings and objects
* that have an attached `.message` or `.stack` property.
*
* NOTE: Errors that are recorded using this method do _not_ obey the
* `ignore_status_codes` configuration.
*
* @param {Error} error
* The error to be traced.
*
* @param {object} [customParameters]
* Optional. Any custom parameters to be displayed in the New Relic UI.
*/
API.prototype.noticeError = function noticeError(error, customParameters) {
var metric = this.agent.metrics.getOrCreateMetric(
NAMES.SUPPORTABILITY.API + '/noticeError'
)
metric.incrementCallCount()
if (typeof error === 'string') error = new Error(error)
var transaction = this.agent.tracer.getTransaction()
this.agent.errors.addUserError(transaction, error, customParameters)
}
/**
* If the URL for a transaction matches the provided pattern, name the
* transaction with the provided name. If there are capture groups in the
* pattern (which is a standard JavaScript regular expression, and can be
* passed as either a RegExp or a string), then the substring matches ($1, $2,
* etc.) are replaced in the name string. BE CAREFUL WHEN USING SUBSTITUTION.
* If the replacement substrings are highly variable (i.e. are identifiers,
* GUIDs, or timestamps), the rule will generate too many metrics and
* potentially get your application blacklisted by New Relic.
*
* An example of a good rule with replacements:
*
* newrelic.addNamingRule('^/storefront/(v[1-5])/(item|category|tag)',
* 'CommerceAPI/$1/$2')
*
* An example of a bad rule with replacements:
*
* newrelic.addNamingRule('^/item/([0-9a-f]+)', 'Item/$1')
*
* Keep in mind that the original URL and any query parameters will be sent
* along with the request, so slow transactions will still be identifiable.
*
* Naming rules can not be removed once added. They can also be added via the
* agent's configuration. See configuration documentation for details.
*
* @param {RegExp} pattern The pattern to rename (with capture groups).
* @param {string} name The name to use for the transaction.
*/
API.prototype.addNamingRule = function addNamingRule(pattern, name) {
var metric = this.agent.metrics.getOrCreateMetric(
NAMES.SUPPORTABILITY.API + '/addNamingRule'
)
metric.incrementCallCount()
if (!name) return logger.error("Simple naming rules require a replacement name.")
this.agent.userNormalizer.addSimple(pattern, '/' + name)
}
/**
* If the URL for a transaction matches the provided pattern, ignore the
* transaction attached to that URL. Useful for filtering socket.io connections
* and other long-polling requests out of your agents to keep them from
* distorting an app's apdex or mean response time. Pattern may be a (standard
* JavaScript) RegExp or a string.
*
* Example:
*
* newrelic.addIgnoringRule('^/socket\\.io/')
*
* @param {RegExp} pattern The pattern to ignore.
*/
API.prototype.addIgnoringRule = function addIgnoringRule(pattern) {
var metric = this.agent.metrics.getOrCreateMetric(
NAMES.SUPPORTABILITY.API + '/addIgnoringRule'
)
metric.incrementCallCount()
if (!pattern) return logger.error("Must include a URL pattern to ignore.")
this.agent.userNormalizer.addSimple(pattern, null)
}
/**
* Get the <script>...</script> header necessary for Browser Monitoring
* This script must be manually injected into your templates, as high as possible
* in the header, but _after_ any X-UA-COMPATIBLE HTTP-EQUIV meta tags.
* Otherwise you may hurt IE!
*
* This method must be called _during_ a transaction, and must be called every
* time you want to generate the headers.
*
* Do *not* reuse the headers between users, or even between requests.
*
* @returns {string} the <script> header to be injected
*/
API.prototype.getBrowserTimingHeader = function getBrowserTimingHeader() {
  // Supportability metric: count every call to this API method.
  var metric = this.agent.metrics.getOrCreateMetric(
    NAMES.SUPPORTABILITY.API + '/getBrowserTimingHeader'
  )
  metric.incrementCallCount()
  var config = this.agent.config
  /**
   * Gracefully fail.
   *
   * Output an HTML comment and log a warning; the comment is meant to be
   * innocuous to the end user.
   *
   * @param {number} num - Error code from `RUM_ISSUES`.
   * @param {bool} [quiet=false] - Be quiet about this failure.
   *
   * @see RUM_ISSUES
   */
  function _gracefail(num, quiet) {
    if (quiet) {
      logger.debug(RUM_ISSUES[num])
    } else {
      logger.warn(RUM_ISSUES[num])
    }
    return '<!-- NREUM: (' + num + ') -->'
  }
  var browser_monitoring = config.browser_monitoring
  // config.browser_monitoring should always exist, but we don't want the agent
  // to bail here if something goes wrong
  if (!browser_monitoring) return _gracefail(2)
  /* Can control header generation with configuration; this setting is only
   * available in the newrelic.js config file, it is not ever set by the
   * server.
   */
  if (!browser_monitoring.enable) {
    // It has been disabled by the user; no need to warn them about their own
    // settings so fail quietly and gracefully.
    return _gracefail(0, true)
  }
  var trans = this.agent.getTransaction()
  // bail gracefully outside a transaction
  if (!trans) return _gracefail(1)
  var name = trans.getName()
  /* If we're in an unnamed transaction, add a friendly warning; this is to
   * avoid people going crazy, trying to figure out why browser monitoring is
   * not working when they're missing a transaction name.
   */
  if (!name) return _gracefail(3)
  var time = trans.timer.getDurationInMillis()
  /*
   * Only the first 13 chars of the license should be used for hashing with
   * the transaction name.
   */
  var key = config.license_key.substr(0, 13)
  var appid = config.application_id
  /* This is only going to work if the agent has successfully handshaked with
   * the collector. If the network is bad, or there is no license key set in
   * newrelic.js, there will be no application_id set. We bail instead of
   * outputting null/undefined configuration values.
   */
  if (!appid) return _gracefail(4)
  /* If there is no browser_key, the server has likely decided to disable
   * browser monitoring.
   */
  var licenseKey = browser_monitoring.browser_key
  if (!licenseKey) return _gracefail(5)
  /* If there is no agent_loader script, there is no point
   * in setting the rum data
   */
  var js_agent_loader = browser_monitoring.js_agent_loader
  if (!js_agent_loader) return _gracefail(6)
  /* If rum is enabled, but then later disabled on the server,
   * this is the only parameter that gets updated.
   *
   * This condition should only be met if rum is disabled during
   * the lifetime of an application, and it should be picked up
   * on the next ForceRestart by the collector.
   */
  var loader = browser_monitoring.loader
  if (loader === 'none') return _gracefail(7)
  // This hash gets written directly into the browser.
  var rum_hash = {
    agent: browser_monitoring.js_agent_file,
    beacon: browser_monitoring.beacon,
    errorBeacon: browser_monitoring.error_beacon,
    licenseKey: licenseKey,
    applicationID: appid,
    applicationTime: time,
    // Obfuscate the transaction name so it isn't plainly visible in page source.
    transactionName: hashes.obfuscateNameUsingKey(name, key),
    queueTime: trans.queueTime,
    ttGuid: trans.id,
    // we don't use these parameters yet
    agentToken: null
  }
  // if debugging, do pretty format of JSON
  var tabs = config.browser_monitoring.debug ? 2 : 0
  var json = JSON.stringify(rum_hash, null, tabs)
  // the complete header to be written to the browser
  var out = util.format(
    RUM_STUB,
    json,
    js_agent_loader
  )
  logger.trace('generating RUM header', out)
  return out
}
/**
* This creates a new tracer with the passed in name. It then wraps the
* callback and binds it to the current transaction and segment so any further
* custom instrumentation as well as auto instrumentation will also be able to
* find the current transaction and segment.
*/
/**
 * Create a named tracer: wraps `callback` so it is bound to the current
 * transaction and segment, letting further custom and auto instrumentation
 * find them. Returns the callback unwrapped when the feature flag is off,
 * the arguments are invalid, or there is no active transaction.
 */
API.prototype.createTracer = function createTracer(name, callback) {
  var supportability = this.agent.metrics.getOrCreateMetric(
    NAMES.SUPPORTABILITY.API + '/createTracer'
  )
  supportability.incrementCallCount()

  // FLAG: custom_instrumentation
  if (!this.agent.config.feature_flag.custom_instrumentation) {
    return callback
  }

  var invalid = false
  if (!name) {
    logger.warn('createTracer called without a name')
    invalid = true
  }
  if (typeof callback !== 'function') {
    logger.warn('createTracer called with a callback arg that is not a function')
    invalid = true
  }
  if (invalid) {
    // Best effort: if the callback was at least defined, return it so the
    // caller's code path doesn't crash.
    return callback
  }

  var tracer = this.agent.tracer
  var txn = tracer.getTransaction()
  if (!txn) {
    logger.debug(
      'createTracer called with %s (%s) outside of a transaction, ' +
      'unable to create tracer.',
      name,
      callback && callback.name
    )
    return callback
  }

  logger.debug(
    'creating tracer %s (%s) on transaction %s.',
    name,
    callback && callback.name,
    txn.id
  )

  var segment = tracer.createSegment(name, customRecorder)
  segment.start()
  return tracer.bindFunction(callback, segment, true)
}
/**
* Creates a function that represents a web transaction. It does not start the
* transaction automatically - the returned function needs to be invoked to start it.
* Inside the handler function, the transaction must be ended by calling endTransaction().
*
* @example
* var newrelic = require('newrelic')
* var transaction = newrelic.createWebTransaction('/some/url/path', function() {
* // do some work
* newrelic.endTransaction()
* })
*
* @param {string} url The URL of the transaction. It is used to name and group
related transactions in APM, so it should be a generic
name and not include any variable parameters.
* @param {Function} handle Function that represents the transaction work.
*/
/**
 * Build a function that starts a web transaction each time it is invoked.
 * The handler must end the transaction itself via endTransaction(). The
 * unwrapped handle is returned when the feature flag is off or the
 * arguments are invalid.
 */
API.prototype.createWebTransaction = function createWebTransaction(url, handle) {
  var supportability = this.agent.metrics.getOrCreateMetric(
    NAMES.SUPPORTABILITY.API + '/createWebTransaction'
  )
  supportability.incrementCallCount()

  // FLAG: custom_instrumentation
  if (!this.agent.config.feature_flag.custom_instrumentation) {
    return handle
  }

  var invalid = false
  if (!url) {
    logger.warn('createWebTransaction called without an url')
    invalid = true
  }
  if (typeof handle !== 'function') {
    logger.warn('createWebTransaction called with a handle arg that is not a function')
    invalid = true
  }
  if (invalid) {
    // Best effort: if the handle was at least defined, return it so the
    // caller's code path doesn't crash.
    return handle
  }

  logger.debug(
    'creating web transaction generator %s (%s).',
    url,
    handle && handle.name
  )

  var tracer = this.agent.tracer
  return tracer.transactionNestProxy('web', function createWebSegment() {
    var transaction = tracer.getTransaction()
    logger.debug(
      'creating web transaction %s (%s) with transaction id: %s',
      url,
      handle && handle.name,
      transaction.id
    )
    // Name the transaction for APM grouping, then apply any user rules.
    transaction.nameState.setName(NAMES.CUSTOM, null, NAMES.ACTION_DELIMITER, url)
    transaction.url = url
    transaction.applyUserNamingRules(transaction.url)
    transaction.webSegment = tracer.createSegment(url, recordWeb)
    transaction.webSegment.start()
    return tracer.bindFunction(handle, transaction.webSegment).apply(this, arguments)
  })
}
/**
* Creates a function that represents a background transaction. It does not start the
* transaction automatically - the returned function needs to be invoked to start it.
* Inside the handler function, the transaction must be ended by calling endTransaction().
*
* @example
* var newrelic = require('newrelic')
* var transaction = newrelic.createBackgroundTransaction('myTransaction', function() {
* // do some work
* newrelic.endTransaction()
* })
*
* @param {string} name The name of the transaction. It is used to name and group
related transactions in APM, so it should be a generic
name and not include any variable parameters.
* @param {string} [group] Optional, used for grouping background transactions in
* APM. For more information see:
* https://docs.newrelic.com/docs/apm/applications-menu/monitoring/transactions-page#txn-type-dropdown
* @param {Function} handle Function that represents the background work.
*/
API.prototype.createBackgroundTransaction = createBackgroundTransaction

/**
 * Build a function that starts a background transaction each time it is
 * invoked. The handler must end the transaction itself via endTransaction().
 *
 * Supports both (name, group, handle) and the two-argument (name, handle)
 * form, in which case the group defaults to 'Nodejs'. Returns the unwrapped
 * handle when the feature flag is off or the arguments are invalid.
 *
 * @param {string} name The transaction name (used for APM grouping).
 * @param {string} [group] Optional APM grouping category.
 * @param {Function} handle Function that represents the background work.
 */
function createBackgroundTransaction(name, group, handle) {
  var metric = this.agent.metrics.getOrCreateMetric(
    NAMES.SUPPORTABILITY.API + '/createBackgroundTransaction'
  )
  metric.incrementCallCount()

  // Two-argument form: the handler arrived in the `group` slot.
  if (handle === undefined && typeof group === 'function') {
    handle = group
    group = 'Nodejs'
  }

  // FLAG: custom_instrumentation
  if (!this.agent.config.feature_flag.custom_instrumentation) {
    return handle
  }

  var fail = false
  if (!name) {
    // BUG FIX: this message previously said "without an url" (copied from
    // createWebTransaction); this API's first argument is a name.
    logger.warn('createBackgroundTransaction called without a name')
    fail = true
  }
  if (typeof handle !== 'function') {
    logger.warn(
      'createBackgroundTransaction called with a handle arg that is not a function'
    )
    fail = true
  }
  if (fail) {
    // If name is undefined but handle is defined we should make a best effort
    // to return it so things don't crash.
    return handle
  }

  logger.debug(
    'creating background transaction generator %s:%s (%s)',
    name,
    group,
    handle && handle.name
  )

  var tracer = this.agent.tracer
  return tracer.transactionNestProxy('bg', function createBackgroundSegment() {
    var tx = tracer.getTransaction()
    logger.debug(
      'creating background transaction %s:%s (%s) with transaction id: %s',
      name,
      group,
      handle && handle.name,
      tx.id
    )
    tx.setBackgroundName(name, group)
    tx.bgSegment = tracer.createSegment(name, recordBackground)
    tx.bgSegment.partialName = group
    tx.bgSegment.start()
    return tracer.bindFunction(handle, tx.bgSegment).apply(this, arguments)
  })
}
/**
 * End the transaction started by a createWebTransaction or
 * createBackgroundTransaction handler. Ends the transaction's primary
 * segment (web or background) before ending the transaction itself.
 * No-op when the feature flag is off; logs when called outside a
 * transaction.
 */
API.prototype.endTransaction = function endTransaction() {
  this.agent.metrics
    .getOrCreateMetric(NAMES.SUPPORTABILITY.API + '/endTransaction')
    .incrementCallCount()

  // FLAG: custom_instrumentation
  if (!this.agent.config.feature_flag.custom_instrumentation) return

  var tracer = this.agent.tracer
  var transaction = tracer.getTransaction()
  if (!transaction) {
    logger.debug('endTransaction() called while not in a transaction.')
    return
  }

  if (transaction.webSegment) {
    // Web transactions get re-named from their URL before closing.
    transaction.setName(transaction.url, 0)
    transaction.webSegment.markAsWeb(transaction.url)
    transaction.webSegment.end()
  } else if (transaction.bgSegment) {
    transaction.bgSegment.end()
  }

  logger.debug(
    'ending transaction with id: %s and name: %s',
    transaction.id,
    transaction.name
  )
  transaction.end()
}
/**
 * Record a custom metric. `value` may be a plain number (recorded as a
 * single value) or a stats object with count/total/min/max/sumOfSquares
 * (and optional totalExclusive) that is merged into the metric.
 * Invalid input is logged and ignored; no-op when the feature flag is off.
 *
 * @param {string} name  Metric name.
 * @param {number|object} value  Single value or stats object.
 */
API.prototype.recordMetric = function recordMetric(name, value) {
  var supportMetric = this.agent.metrics.getOrCreateMetric(
    NAMES.SUPPORTABILITY.API + '/recordMetric'
  )
  supportMetric.incrementCallCount()

  // FLAG: custom_metrics
  if (!this.agent.config.feature_flag.custom_metrics) {
    return
  }

  if (typeof name !== 'string') {
    logger.warn('Metric name must be a string')
    return
  }

  var metric = this.agent.metrics.getOrCreateMetric(name)

  if (typeof value === 'number') {
    metric.recordValue(value)
    return
  }

  // BUG FIX: `typeof null === 'object'`, so a null value previously slipped
  // past this guard and crashed below when reading value.count.
  if (value === null || typeof value !== 'object') {
    logger.warn('Metric value must be either a number, or a metric object')
    return
  }

  var stats = {}
  // Translate the user-facing field names to the internal stats fields.
  var required = ['count', 'total', 'min', 'max', 'sumOfSquares']
  var keyMap = {count: 'callCount'}
  for (var i = 0, l = required.length; i < l; ++i) {
    if (typeof value[required[i]] !== 'number') {
      logger.warn('Metric object must include ' + required[i] + ' as a number')
      return
    }
    var key = keyMap[required[i]] || required[i]
    stats[key] = value[required[i]]
  }

  // totalExclusive defaults to total when not supplied.
  if (typeof value.totalExclusive === 'number') {
    stats.totalExclusive = value.totalExclusive
  } else {
    stats.totalExclusive = value.total
  }

  metric.merge(stats)
}
/**
 * Increment a custom metric's call count by `value` (default 1). The value
 * must be an integer; anything else is logged and ignored. No-op when the
 * feature flag is off.
 */
API.prototype.incrementMetric = function incrementMetric(name, value) {
  this.agent.metrics
    .getOrCreateMetric(NAMES.SUPPORTABILITY.API + '/incrementMetric')
    .incrementCallCount()

  // FLAG: custom_metrics
  if (!this.agent.config.feature_flag.custom_metrics) {
    return
  }

  // Default missing/falsy values (other than an explicit 0) to 1.
  var amount = (!value && value !== 0) ? 1 : value

  if (typeof amount !== 'number' || amount % 1 !== 0) {
    logger.warn('Metric Increment value must be an integer')
    return
  }

  this.recordMetric(name, {
    count: amount,
    total: 0,
    min: 0,
    max: 0,
    sumOfSquares: 0
  })
}
/**
 * Record a custom Insights event. `eventType` must be a string matching
 * CUSTOM_EVENT_TYPE_REGEX and at most 255 chars; `attributes` must be a
 * non-array object with keys at most 255 chars. All problems with a call
 * are logged before the call is dropped.
 */
API.prototype.recordCustomEvent = function recordCustomEvent(eventType, attributes) {
  this.agent.metrics
    .getOrCreateMetric(NAMES.SUPPORTABILITY.API + '/recordCustomEvent')
    .incrementCallCount()

  if (!this.agent.config.custom_insights_events.enabled) {
    return
  }

  // Validate everything before bailing so one bad call reports every problem.
  var invalid = false

  if (!eventType || typeof eventType !== 'string') {
    logger.warn(
      'recordCustomEvent requires a string for its first argument, got %s (%s)',
      stringify(eventType),
      typeof eventType
    )
    invalid = true
  } else if (!CUSTOM_EVENT_TYPE_REGEX.test(eventType)) {
    logger.warn(
      'recordCustomEvent eventType of %s is invalid, it must match /%s/',
      eventType,
      CUSTOM_EVENT_TYPE_REGEX.source
    )
    invalid = true
  } else if (eventType.length > 255) {
    logger.warn(
      'recordCustomEvent eventType must have a length less than 256, got %s (%s)',
      eventType,
      eventType.length
    )
    invalid = true
  }

  // The attributes argument must be an object, and not an array.
  if (!attributes || typeof attributes !== 'object' || Array.isArray(attributes)) {
    logger.warn(
      'recordCustomEvent requires an object for its second argument, got %s (%s)',
      stringify(attributes),
      typeof attributes
    )
    invalid = true
  } else if (_checkKeyLength(attributes, 255)) {
    invalid = true
  }

  if (invalid) {
    return
  }

  var intrinsics = {
    type: eventType,
    timestamp: Date.now()
  }
  this.agent.customEvents.add([intrinsics, attributes])
}
/**
* Shuts down the agent.
*
* @param {object} [options] object with shut down options
* @param {boolean} [options.collectPendingData=false] If true, the agent will send any
* pending data to the collector
* before shutting down.
* @param {number} [options.timeout] time in ms to wait before
* shutting down
* @param {function} [callback] callback function that runs when
* agent stopped
*/
/**
 * Shut down the agent, optionally harvesting pending data first.
 * Supports shutdown(), shutdown(cb), and shutdown(options, cb) forms.
 * When collectPendingData is set and the agent has not finished starting,
 * the harvest waits for the 'started' event (optionally capped by
 * options.timeout in ms).
 */
API.prototype.shutdown = function shutdown(options, cb) {
  this.agent.metrics
    .getOrCreateMetric(NAMES.SUPPORTABILITY.API + '/shutdown')
    .incrementCallCount()

  // Normalize the callback across the three calling conventions.
  var callback = cb
  if (!callback) {
    callback = (typeof options === 'function') ? options : function noop() {}
  }

  var agent = this.agent

  // Stop the agent after the final harvest, logging any harvest failure.
  function cb_harvest(error) {
    if (error) {
      logger.error(
        error,
        'An error occurred while running last harvest before shutdown.'
      )
    }
    agent.stop(callback)
  }

  var wantsPendingData = options && options.collectPendingData

  if (wantsPendingData && agent._state !== 'started') {
    // Agent not fully started yet: wait for it, but cap the wait when a
    // numeric timeout was supplied so shutdown can't hang forever.
    if (typeof options.timeout === 'number') {
      var shutdownTimeout = setTimeout(function shutdownTimeout() {
        agent.stop(callback)
      }, options.timeout)
      // timer.unref only in 0.9+
      if (shutdownTimeout.unref) {
        shutdownTimeout.unref()
      }
    } else if (options.timeout) {
      logger.warn(
        'options.timeout should be of type "number". Got %s',
        typeof options.timeout
      )
    }

    agent.on('started', function shutdownHarvest() {
      agent.harvest(cb_harvest)
    })
    agent.on('errored', function logShutdownError(error) {
      agent.stop(callback)
      if (error) {
        logger.error(
          error,
          'The agent encountered an error after calling shutdown.'
        )
      }
    })
  } else if (wantsPendingData) {
    agent.harvest(cb_harvest)
  } else {
    agent.stop(callback)
  }
}
/**
 * Check whether any own key of `object` exceeds `maxLength` characters.
 * Logs a warning for each offending key and returns true if any was found.
 */
function _checkKeyLength(object, maxLength) {
  var tooLong = false
  var keys = Object.keys(object)
  for (var i = 0; i < keys.length; i++) {
    var key = keys[i]
    if (key.length > maxLength) {
      logger.warn(
        'recordCustomEvent requires keys to be less than 256 chars got %s (%s)',
        key,
        key.length
      )
      tooLong = true
    }
  }
  return tooLong
}
module.exports = API
|
'use strict'

var logger = require('./lib/logger.js')
var semver = require('semver')

var message
var agent

var agentVersion = require('./package.json').version
logger.info(
  "Using New Relic for Node.js. Agent version: %s; Node version: %s.",
  agentVersion, process.version
)

// If the agent was already loaded once in this process, reuse the cached
// module instead of instrumenting everything a second time.
// (The coverage listing showed `Iif` here -- an istanbul annotation fused
// into the extracted text, not real code.)
if (require.cache.__NR_cache) {
  logger.warn(
    'Attempting to load a second copy of newrelic from %s, using cache instead',
    __dirname
  )
  module.exports = require.cache.__NR_cache
} else {
  initialize()
}
/**
 * Bootstrap the agent: validate the Node version, load configuration,
 * construct the singleton Agent, patch module loading, and start it.
 * On any failure the stub API is exported instead so user code keeps
 * working without instrumentation.
 *
 * NOTE: the `Iif`/`Eif` tokens in the coverage listing were istanbul branch
 * annotations fused into the extracted text; the real keyword is `if`.
 */
function initialize() {
  logger.debug(
    'Loading agent from %s',
    __dirname
  )

  try {
    logger.debug("Process was running %s seconds before agent was loaded.",
      process.uptime())

    // Technically we run on 0.6, until we verify there are 0 users on 0.6, we
    // should leave this code doing a check against 0.6, but then advise that
    // people upgrade to one of our officially supported version (0.8 and higher)
    if (semver.satisfies(process.version, '<0.6.0')) {
      message = "New Relic for Node.js requires a version of Node equal to or\n" +
        "greater than 0.8.0. Not starting!"
      logger.error(message)
      throw new Error(message)
    }

    logger.debug("Current working directory at module load is %s.", process.cwd())
    logger.debug("Process title is %s.", process.title)
    logger.debug("Application was invoked as %s.", process.argv.join(' '))

    var config = require('./lib/config.js').getOrCreateInstance()

    // Get the initialized logger as we likely have a bootstrap logger which
    // just pipes to stdout.
    logger = require('./lib/logger.js')

    if (!config || !config.agent_enabled) {
      logger.info("Module not enabled in configuration; not starting.")
    } else {
      /* Only load the rest of the module if configuration is available and the
       * configurator didn't throw.
       *
       * The agent must be a singleton, or else module loading will be patched
       * multiple times, with undefined results. New Relic's instrumentation
       * can't be enabled or disabled without an application restart.
       */
      var Agent = require('./lib/agent.js')
      agent = new Agent(config)
      var appNames = agent.config.applications()

      if (config.logging.diagnostics) {
        logger.warn(
          'Diagnostics logging is enabled, this may cause significant overhead.'
        )
      }

      if (appNames.length < 1) {
        message = "New Relic requires that you name this application!\n" +
          "Set app_name in your newrelic.js file or set environment variable\n" +
          "NEW_RELIC_APP_NAME. Not starting!"
        logger.error(message)
        throw new Error(message)
      }

      var shimmer = require('./lib/shimmer.js')
      shimmer.patchModule(agent)
      shimmer.bootstrapInstrumentation(agent)

      agent.start(function cb_start(error) {
        if (!error) {
          return logger.debug("New Relic for Node.js is connected to New Relic.")
        }

        var errorMessage = "New Relic for Node.js halted startup due to an error:"
        logger.error(error, errorMessage)

        console.error(errorMessage)
        console.error(error.stack)
      })
    }
  } catch (error) {
    message = "New Relic for Node.js was unable to bootstrap itself due to an error:"
    logger.error(error, message)

    console.error(message)
    console.error(error.stack)
  }

  // Export the real API when the agent came up, otherwise the no-op stub.
  var API
  if (agent) {
    API = require('./api.js')
  } else {
    API = require('./stub_api.js')
  }

  require.cache.__NR_cache = module.exports = new API(agent)
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 | 1 | 'use strict'
/**
* New Relic agent configuration.
*
* See lib/config.defaults.js in the agent distribution for a more complete
* description of configuration variables and their potential values.
*/
// See lib/config.default.js in the agent distribution for the full set of
// available settings and their defaults.
exports.config = {
  /**
   * Array of application names.
   */
  app_name: ['My Application'],
  /**
   * Your New Relic license key.
   */
  license_key: 'license key here',
  logging: {
    /**
     * Level at which to log. 'trace' is most useful to New Relic when diagnosing
     * issues with the agent, 'info' and higher will impose the least overhead on
     * production applications.
     */
    level: 'info'
  }
}
|
'use strict'

var logger = require('./lib/logger.js')
var RealAPI = require('./api.js')
/* eslint-disable no-eval */
// Build a named no-op stub for an API method. eval is used deliberately so
// the generated function carries the real method name (for fn.name and stack
// traces) on the old Node versions this file supports.
function stubFunction(name) {
  var source = "(function () {return function " + name + "() {" +
    "logger.debug('Not calling " + name + " because New Relic is disabled.');" +
    "}}())"
  return eval(source)
}
/* eslint-enable no-eval */
// No-op stand-in for the real API, exported when the agent is disabled.
function Stub() {}
// Mirror every method on the real API so the stub never falls out of sync.
var keys = Object.keys(RealAPI.prototype)
var length = keys.length
/* This way the stub API doesn't have to be updated in lockstep with the regular
 * API.
 */
for (var i = 0; i < length; i++) {
  var functionName = keys[i]
  Stub.prototype[functionName] = stubFunction(functionName)
}
// A few methods need real (non-generated) stubs because callers depend on
// their return values or async behavior; override them below.
Stub.prototype.createTracer = createTracer
Stub.prototype.createWebTransaction = createWebTransaction
Stub.prototype.createBackgroundTransaction = createBackgroundTransaction
Stub.prototype.getBrowserTimingHeader = getBrowserTimingHeader
Stub.prototype.shutdown = shutdown
// getBrowserTimingHeader's output gets injected into HTML templates, so it
// must always be a string -- never undefined/null.
function getBrowserTimingHeader() {
  logger.debug('Not calling getBrowserTimingHeader because New Relic is disabled.')
  return ''
}

// The real versions of the next three calls return a wrapped callback; the
// stubs hand the callback back untouched.
function createTracer(name, callback) {
  logger.debug('Not calling createTracer because New Relic is disabled.')
  return callback
}

function createWebTransaction(url, callback) {
  logger.debug('Not calling createWebTransaction because New Relic is disabled.')
  return callback
}

function createBackgroundTransaction(name, group, callback) {
  logger.debug('Not calling createBackgroundTransaction because New Relic is disabled.')
  if (callback === undefined) {
    // Two-argument form (name, handler): the handler arrived as `group`.
    return group
  }
  return callback
}
// The real shutdown() runs its callback asynchronously after the agent
// stops; the stub preserves that async contract via process.nextTick.
function shutdown(options, cb) {
  logger.debug('Not calling shutdown because New Relic is disabled.')
  var callback = cb
  if (!callback) {
    if (typeof options === 'function') {
      callback = options
    } else {
      // Use a plain named no-op instead of `new Function()`: the Function
      // constructor is an eval-equivalent, and the real API's shutdown
      // already uses a named noop for this case.
      callback = function noop() {}
    }
  }
  process.nextTick(callback)
}
module.exports = Stub
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| ca-gen.js | 10% | (4 / 40) | 0% | (0 / 6) | 0% | (0 / 6) | 10.26% | (4 / 39) | |
| check-native-metrics.js | 62.5% | (15 / 24) | 60% | (12 / 20) | 100% | (5 / 5) | 62.5% | (15 / 24) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 | 2 1 1 1 | //#!/usr/bin/env node
var fs = require('fs');
var path = require('path');
var glob = require('glob');

// Where the raw PEM bundle lives and where the generated JS module goes.
var CERT_PATH = path.join(__dirname, '..', '..', 'SSL_CA_cert_bundle', '*.pem');
var OUTFILE =
  path.join(__dirname, '..', 'lib', 'collector', 'ssl', 'certificates.js');

// File header written verbatim at the top of the generated module.
var HEADER =
  "/**\n" +
  " * certificates.js - CA bundle for SSL communication with RPM.\n" +
  " *\n" +
  " * This file contains the X509 certificates used to communicate with New Relic\n" +
  " * over SSL.\n" +
  " */\n\n";
// A single named PEM certificate read from disk.
function Certificate() {
  this.name = null;
  this.body = null;
}

/**
 * Render this certificate as a JS source fragment: a comment line with the
 * cert's name followed by one quoted string per PEM line, joined with `+`.
 * Stops at the END CERTIFICATE marker.
 */
Certificate.prototype.toEntry = function toEntry() {
  var entry = ' // ' + this.name + '\n';
  var lines = this.body.split('\n');
  for (var idx = 0; idx < lines.length; idx++) {
    // some Thawte certificates have Windows line endings
    var current = lines[idx].replace('\r', '');
    if (/END CERTIFICATE/.test(current)) {
      entry += ' "' + current + '\\n"';
      break;
    }
    entry += ' "' + current + '\\n" +\n';
  }
  return entry;
};
/**
 * Load every PEM file matching the `root` glob into Certificate objects.
 *
 * @param {string} root  Glob pattern for the PEM files.
 * @param {Function} callback  Node-style (error, certificates) callback.
 */
function loadCerts(root, callback) {
  glob(root, function (error, files) {
    if (error) return callback(error, null);

    var certificates = [];
    // BUG FIX: log message previously misspelled "certficates".
    console.error("Loading %s certificates.", files.length);

    var certificate, file;
    for (var i = 0; i < files.length; i++) {
      file = files[i];
      certificate = new Certificate();
      certificate.name = path.basename(file, '.pem');
      certificate.body = fs.readFileSync(file, 'ascii');
      certificates.push(certificate);
    }

    callback(null, certificates);
  });
}
/**
 * Write the loaded certificates to OUTFILE as a JS module, or exit with
 * status 1 if loading failed.
 */
function dumpCerts(error, certs) {
  if (error) {
    console.error("got %s reading certs; bailing out", error.message);
    process.exit(1);
  }

  var entries = certs.map(function cb_map(cert) { return cert.toEntry(); });
  var contents = HEADER +
    'module.exports = [\n' +
    entries.join(',\n\n') +
    '\n]\n';
  fs.writeFileSync(OUTFILE, contents);
}

loadCerts(CERT_PATH, dumpCerts);
|
'use strict'
/* eslint-disable no-console */

// Check the escape hatch before doing anything else.
if (process.env.NEW_RELIC_SKIP_NATIVE_METRICS) {
  console.log(
    'Skipping install of @newrelic/native-metrics, NEW_RELIC_SKIP_NATIVE_METRICS is set.'
  )
  process.exit(0)
}

var cp = require('child_process')

// NOTE This script is in javascript instead of bash because we want it to work
// on Windows and bash scripts don't work there.
// TODO Remove this script and put @newrelic/native-metrics back as an optional
// dependency when we drop support for Node v0.8.

cp.exec('npm --version', function npmVersionCB(err, npmVersionStr) {
  if (err) {
    console.log(
      'Skipping install of @newrelic/native-metrics, could not determine NPM version.'
    )
    return
  }

  var npmVersion = (npmVersionStr || '').split('.').map(function versionMap(a) {
    return parseInt(a, 10)
  })

  if (npmVersion.length >= 3 && npmVersion[0] >= 2) {
    cp.exec('node --version', function nodeVersionCB(err, nodeVersionStr) {
      if (err) {
        console.log(
          'Skipping install of @newrelic/native-metrics, could not determine ' +
          'Node version.'
        )
      }

      // Strip the leading "v" before splitting the version into numbers.
      var nodeVersion = (nodeVersionStr || '').substr(1).split('.')
        .map(function versionMap(a) {
          return parseInt(a, 10)
        })

      if (nodeVersion.length >= 3 && nodeVersion[0] !== 5) {
        console.log(
          'Installing @newrelic/native-metrics with npm ' + npmVersionStr.trim() +
          ' on Node ' + nodeVersionStr.trim() + '. This may take a moment.'
        )
        cp.exec('npm install @newrelic/native-metrics', function installCB(err) {
          if (err) {
            console.log('Failed to install @newrelic/native-metrics')
            console.log(err)
          }
        })
      } else {
        console.log(
          'Skipping install of @newrelic/native-metrics due to unsupported ' +
          'version of Node. See the New Relic documentation for details on ' +
          'compatibility and requirements for this feature.'
        )
      }
    })
  } else {
    console.log(
      'Skipping install of @newrelic/native-metrics due to unsupported version ' +
      'of NPM. See the New Relic documentation for details on compatibility and ' +
      'requirements for this feature.'
    )
  }
})
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| agent.js | 12.89% | (58 / 450) | 0% | (0 / 197) | 0% | (0 / 58) | 13.43% | (58 / 432) | |
| aws-info.js | 18.75% | (12 / 64) | 0% | (0 / 21) | 0% | (0 / 12) | 19.67% | (12 / 61) | |
| config.default.js | 100% | (1 / 1) | 100% | (0 / 0) | 100% | (0 / 0) | 100% | (1 / 1) | |
| config.js | 24.75% | (101 / 408) | 5.65% | (14 / 248) | 10.87% | (5 / 46) | 25.19% | (98 / 389) | |
| environment.js | 78.65% | (140 / 178) | 57.83% | (48 / 83) | 63.33% | (19 / 30) | 78.95% | (135 / 171) | |
| feature_flags.js | 100% | (3 / 3) | 100% | (0 / 0) | 100% | (0 / 0) | 100% | (3 / 3) | |
| instrumentations.js | 100% | (2 / 2) | 100% | (0 / 0) | 100% | (1 / 1) | 100% | (2 / 2) | |
| logger.js | 25% | (5 / 20) | 14.29% | (1 / 7) | 0% | (0 / 1) | 25% | (5 / 20) | |
| parse-dockerinfo.js | 13.16% | (5 / 38) | 0% | (0 / 14) | 0% | (0 / 4) | 14.29% | (5 / 35) | |
| parse-proc-cpuinfo.js | 10.91% | (6 / 55) | 0% | (0 / 17) | 0% | (0 / 9) | 10.91% | (6 / 55) | |
| parse-proc-meminfo.js | 37.5% | (3 / 8) | 0% | (0 / 2) | 0% | (0 / 1) | 42.86% | (3 / 7) | |
| reservoir.js | 25% | (7 / 28) | 0% | (0 / 16) | 0% | (0 / 6) | 28% | (7 / 25) | |
| sampler.js | 19.2% | (24 / 125) | 0% | (0 / 28) | 0% | (0 / 25) | 19.35% | (24 / 124) | |
| shimmer.js | 7.63% | (9 / 118) | 0% | (0 / 52) | 0% | (0 / 18) | 8.82% | (9 / 102) | |
| system-info.js | 14.39% | (20 / 139) | 0% | (0 / 64) | 0% | (0 / 29) | 14.81% | (20 / 135) | |
| timer.js | 21.05% | (20 / 95) | 0% | (0 / 49) | 0% | (0 / 15) | 25% | (20 / 80) | |
| uninstrumented.js | 70.59% | (24 / 34) | 50% | (6 / 12) | 50% | (3 / 6) | 68.75% | (22 / 32) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960 961 962 963 964 965 966 967 968 969 970 971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992 993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008 1009 1010 1011 1012 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var util = require('util')
var EventEmitter = require('events').EventEmitter
var Reservoir = require('./reservoir.js')
var logger = require('./logger.js')
var sampler = require('./sampler.js')
var NAMES = require('./metrics/names.js')
var CollectorAPI = require('./collector/api.js')
var ErrorAggregator = require('./errors/aggregator')
var Metrics = require('./metrics')
var MetricNormalizer = require('./metrics/normalizer.js')
var TxSegmentNormalizer = require('./metrics/normalizer/tx_segment.js')
var MetricMapper = require('./metrics/mapper.js')
var TraceAggregator = require('./transaction/trace/aggregator.js')
var hashes = require('./util/hashes')
var uninstrumented = require('./uninstrumented.js')
var QueryTracer = require('./db/tracer')
/*
*
* CONSTANTS
*
*/
// Lifecycle states the agent may pass through; setState() rejects any value
// not in this list and emits the new state as an event on the agent.
var STATES = [
'stopped', // start state
'starting', // handshaking with NR
'connected', // connected to collector
'disconnected', // disconnected from collector
'started', // up and running
'stopping', // shutting down
'errored' // stopped due to error
]
// just to make clear what's going on
var TO_MILLIS = 1e3 // seconds -> milliseconds
var FROM_MILLIS = 1e-3 // milliseconds -> seconds
// Check for already loaded modules and warn about them. This must be executed
// only once, at the first require of this file, or else we have problems in
// unit tests.
uninstrumented.check()
/**
* There's a lot of stuff in this constructor, due to Agent acting as the
* orchestrator for New Relic within instrumented applications.
*
* This constructor can throw if, for some reason, the configuration isn't
* available. Don't try to recover here, because without configuration the
* agent can't be brought up to a useful state.
*/
function Agent(config) {
EventEmitter.call(this)
if (!config) throw new Error("Agent must be created with a configuration!")
// The agent base attributes which last throughout its lifetime.
this._state = 'stopped'
this.config = config
this.environment = require('./environment')
this.version = this.config.version
this.collector = new CollectorAPI(this)
// Reset the agent to add all the sub-objects it needs. These object are the
// ones that get re-created if the agent is told to restart from the collector.
// They are nulled out first so the object shape is fixed before reset() fills
// them in (see the "hidden class" note below).
this.events = null
this.customEvents = null
this.errors = null
this.mapper = null
this.metricNameNormalizer = null
this.metrics = null
this.transactionNameNormalizer = null
this.urlNormalizer = null
this.txSegmentNormalizer = null
this.userNormalizer = null
this.reset()
// Transaction tracing.
this.tracer = this._setupTracer()
this.traces = new TraceAggregator(this.config)
// Query tracing.
this.queries = new QueryTracer(this.config)
// Set up all the configuration events the agent needs to listen for.
var self = this
this.config.on('apdex_t', this._apdexTChange.bind(this))
this.config.on('data_report_period', this._harvesterIntervalChange.bind(this))
this.config.on('agent_enabled', this._enabledChange.bind(this))
this.config.on('change', this._configChange.bind(this))
// The four rule events below forward whatever arguments the config emits
// straight into the corresponding normalizer's load().
this.config.on('metric_name_rules', function updateMetricNameNormalizer() {
self.metricNameNormalizer.load.apply(self.metricNameNormalizer, arguments)
})
this.config.on('transaction_name_rules', function updateTransactionNameNormalizer() {
self.transactionNameNormalizer.load.apply(self.transactionNameNormalizer, arguments)
})
this.config.on('url_rules', function updateUrlNormalizer() {
self.urlNormalizer.load.apply(self.urlNormalizer, arguments)
})
this.config.on('transaction_segment_terms', function updateSegmentNormalizer() {
self.txSegmentNormalizer.load.apply(self.txSegmentNormalizer, arguments)
})
// Entity tracking metrics (logged each harvest; see harvest()).
this.totalActiveSegments = 0
this.segmentsCreatedInHarvest = 0
this.segmentsClearedInHarvest = 0
this.activeTransactions = 0
// Hidden class optimizations.
this.harvesterHandle = null
// Finally, add listeners for the agent's own events.
this.on('transactionFinished', this._transactionFinished.bind(this))
}
util.inherits(Agent, EventEmitter)
/**
* The agent is meant to only exist once per application, but the singleton is
* managed by index.js. An agent will be created even if the agent's disabled by
* the configuration.
*
* @config {boolean} agent_enabled Whether to start up the agent.
*
* @param {Function} callback Continuation and error handler.
*/
Agent.prototype.start = function start(callback) {
if (!callback) throw new TypeError("callback required!")
var agent = this
this.setState('starting')
// Bail out early when the agent is disabled by configuration; the callback
// still fires (asynchronously, with no error) so callers aren't left hanging.
if (this.config.agent_enabled !== true) {
logger.warn("The New Relic Node.js agent is disabled by its configuration. " +
"Not starting!")
this.setState('stopped')
return process.nextTick(callback)
}
// A license key is mandatory; without one the connect handshake cannot work.
if (!(this.config.license_key)) {
logger.error("A valid account license key cannot be found. " +
"Has a license key been specified in the agent configuration " +
"file or via the NEW_RELIC_LICENSE_KEY environment variable?")
this.setState('errored')
return process.nextTick(function cb_nextTick() {
callback(new Error("Not starting without license key!"))
})
}
sampler.start(agent)
logger.info("Starting New Relic for Node.js connection process.")
this.collector.connect(function cb_connect(error, config) {
if (error) {
agent.setState('errored')
return callback(error, config)
}
if (agent.collector.isConnected() && !agent.config.no_immediate_harvest) {
// harvest immediately for quicker data display, but after at least 1
// second or the collector will throw away the data.
setTimeout(function one_sec_delayed_harvest() {
agent.harvest(function cb_harvest(error) {
// The recurring harvest timer only starts after this first harvest.
agent._startHarvester(agent.config.data_report_period)
agent.setState('started')
callback(error, config)
})
}, 1000)
} else {
process.nextTick(function cb_nextTick() {
callback(null, config)
})
}
})
}
/**
* Any memory claimed by the agent will be retained after stopping.
*
* FIXME: make it possible to dispose of the agent, as well as do a
* "hard" restart. This requires working with shimmer to strip the
* current instrumentation and patch to the module loader.
*/
/**
 * Shut the agent down: stop the harvest timer and the sampler, then close
 * the collector connection if one is open.
 *
 * Any memory claimed by the agent will be retained after stopping.
 *
 * FIXME: make it possible to dispose of the agent, as well as do a
 * "hard" restart. This requires working with shimmer to strip the
 * current instrumentation and patch to the module loader.
 *
 * @param {Function} callback Invoked with any shutdown error.
 */
Agent.prototype.stop = function stop(callback) {
  if (!callback) throw new TypeError("callback required!")
  var self = this
  this.setState('stopping')
  this._stopHarvester()
  sampler.stop()
  // Nothing to shut down remotely when we never connected.
  if (!this.collector.isConnected()) {
    process.nextTick(callback)
    return
  }
  this.collector.shutdown(function cb_shutdown(error) {
    if (error) {
      self.setState('errored')
      logger.warn(error, "Got error shutting down connection to New Relic:")
    } else {
      self.setState('stopped')
      logger.info("Stopped New Relic for Node.js.")
    }
    callback(error)
  })
}
/**
* Builds all of the sub-properties of the agent that rely on configurations.
*/
/**
 * Builds all of the sub-properties of the agent that rely on configuration.
 * Called from the constructor and again whenever the collector tells the
 * agent to restart; reservoirs and the error aggregator are created lazily
 * and reconfigured in place, everything else is rebuilt from scratch.
 */
Agent.prototype.reset = function reset() {
  var config = this.config
  // Insights events: create the reservoirs once, then apply current limits.
  this.events = this.events || new Reservoir()
  this.events.setLimit(config.transaction_events.max_samples_per_minute)
  this.customEvents = this.customEvents || new Reservoir()
  this.customEvents.setLimit(config.custom_insights_events.max_samples_stored)
  // Error tracing.
  this.errors = this.errors || new ErrorAggregator(config)
  this.errors.reconfigure(config)
  // Metrics.
  this.mapper = new MetricMapper()
  this.metricNameNormalizer = new MetricNormalizer(config, 'metric name')
  this.metrics = new Metrics(config.apdex_t, this.mapper, this.metricNameNormalizer)
  // Transaction naming.
  this.transactionNameNormalizer = new MetricNormalizer(config, 'transaction name')
  this.urlNormalizer = new MetricNormalizer(config, 'URL')
  // Segment term based tx renaming for MGI mitigation.
  this.txSegmentNormalizer = new TxSegmentNormalizer()
  // User naming and ignoring rules.
  this.userNormalizer = new MetricNormalizer(config, 'user')
  this.userNormalizer.loadFromConfig()
  // Supportability metrics get their own bucket when internal metrics are on.
  if (config.debug.internal_metrics) {
    config.debug.supportability = new Metrics(
      config.apdex_t,
      this.mapper,
      this.metricNameNormalizer
    )
  }
}
/**
* On agent startup, an interval timer is started that calls this method once
* a minute, which in turn invokes the pieces of the harvest cycle. It calls
* the various collector API methods in order, bailing out if one of them fails,
* to ensure that the agents don't pummel the collector if it's already
* struggling.
*/
Agent.prototype.harvest = function harvest(callback) {
if (!callback) throw new TypeError("callback required!")
var agent = this
// The delivery methods below run strictly in this order; the cycle aborts
// on the first error so a struggling collector isn't hammered further.
var harvestSteps = [
'_sendMetrics',
'_sendErrors',
'_sendTrace',
'_sendEvents',
'_sendCustomEvents',
'_sendQueries',
'_sendErrorEvents'
]
logger.trace({
segmentTotal: this.totalActiveSegments,
harvestCreated: this.segmentsCreatedInHarvest,
harvestCleared: this.segmentsClearedInHarvest,
activeTransactions: this.activeTransactions
}, 'Entity stats on harvest')
// Per-harvest segment counters start over each cycle.
this.segmentsCreatedInHarvest = 0
this.segmentsClearedInHarvest = 0
if (!this.collector.isConnected()) {
return process.nextTick(function cb_nextTick() {
callback(new Error("Not connected to New Relic!"))
})
}
runHarvestStep(0)
// Sequential async runner: n is incremented before the step runs, so the
// `next` closure sees the index of the step AFTER the one in flight.
function runHarvestStep(n) {
agent[harvestSteps[n++]](next)
function next(error) {
if (error || n >= harvestSteps.length) return callback(error)
runHarvestStep(n)
}
}
}
/**
* Public interface for passing configuration data from the collector
* on to the configuration, in an effort to keep them at least somewhat
* decoupled.
*
* @param {object} configuration New config JSON from the collector.
*/
Agent.prototype.reconfigure = function reconfigure(configuration) {
if (!configuration) throw new TypeError("must pass configuration")
// Delegate entirely to the config object; it emits the events the agent
// subscribed to in the constructor.
this.config.onConnect(configuration)
}
/**
* Make it easier to determine what state the agent thinks it's in (needed
* for a few tests, but fragile).
*
* FIXME: remove the need for this
*
* @param {string} newState The new state of the agent.
*/
/**
 * Make it easier to determine what state the agent thinks it's in (needed
 * for a few tests, but fragile). Emits the new state name as an event.
 *
 * FIXME: remove the need for this
 *
 * @param {string} newState One of the STATES values.
 * @throws {TypeError} When newState isn't a recognized state.
 */
Agent.prototype.setState = function setState(newState) {
  if (STATES.indexOf(newState) < 0) {
    throw new TypeError("Invalid state " + newState)
  }
  logger.debug("Agent state changed from %s to %s.", this._state, newState)
  this._state = newState
  this.emit(newState)
}
/**
* Server-side configuration value.
*
* @param {number} apdexT Apdex tolerating value, in seconds.
*/
/**
 * Server-side configuration value changed.
 *
 * @param {number} apdexT Apdex tolerating value, in seconds.
 */
Agent.prototype._apdexTChange = function _apdexTChange(apdexT) {
  logger.debug("Apdex tolerating value changed to %s.", apdexT)
  this.metrics.apdexT = apdexT
  // Keep the supportability metrics bucket in sync when it exists.
  var supportability = this.config.debug.supportability
  if (supportability) {
    supportability.apdexT = apdexT
  }
}
/**
* Server-side configuration value. When run, forces a harvest cycle
* so as to not cause the agent to go too long without reporting.
*
* @param {number} interval Time in seconds between harvest runs.
*/
Agent.prototype._harvesterIntervalChange = _harvesterIntervalChange
/**
 * Server-side configuration value changed. When run, forces a harvest cycle
 * so as to not cause the agent to go too long without reporting.
 *
 * @param {number} interval Time in seconds between harvest runs.
 * @param {Function} [callback] Optional; gets any harvest error.
 */
function _harvesterIntervalChange(interval, callback) {
  var self = this
  // Nothing to reschedule unless the harvester is currently running.
  if (!this.harvesterHandle) {
    if (callback) process.nextTick(callback)
    return
  }
  // Force a harvest now, to be safe, then restart on the new interval.
  this.harvest(function cb_harvest(error) {
    self._restartHarvester(interval)
    if (callback) callback(error)
  })
}
/**
* Restart the harvest cycle timer.
*
* @param {number} harvestSeconds How many seconds between harvests.
*/
Agent.prototype._restartHarvester = function _restartHarvester(harvestSeconds) {
// Stop-then-start is safe even when no timer is running (_stopHarvester
// guards the clearInterval).
this._stopHarvester()
this._startHarvester(harvestSeconds)
}
/**
* Safely stop the harvest cycle timer.
*/
/**
 * Safely stop the harvest cycle timer. A no-op when no timer is running.
 */
Agent.prototype._stopHarvester = function _stopHarvester() {
  if (this.harvesterHandle) clearInterval(this.harvesterHandle)
  // Use null rather than undefined: the constructor initializes
  // harvesterHandle to null for hidden-class stability, and every consumer
  // only truthiness-checks the handle, so this is behavior-compatible.
  this.harvesterHandle = null
}
/**
* Safely start the harvest cycle timer, and ensure that the harvest
* cycle won't keep an application from exiting if nothing else is
* happening to keep it up.
*
* @param {number} harvestSeconds How many seconds between harvests.
*/
/**
 * Safely start the harvest cycle timer, and ensure that the harvest
 * cycle won't keep an application from exiting if nothing else is
 * happening to keep it up.
 *
 * @param {number} harvestSeconds How many seconds between harvests.
 */
Agent.prototype._startHarvester = function _startHarvester(harvestSeconds) {
  var self = this
  // Delivery failures are logged, never thrown; data is held for redelivery.
  function logDeliveryError(error) {
    if (error) {
      logger.info(error, "Error on submission to New Relic (data held for redelivery):")
    }
  }
  this.harvesterHandle = setInterval(function harvester() {
    self.harvest(logDeliveryError)
  }, harvestSeconds * TO_MILLIS)
  // timer.unref is 0.9+ — don't let the harvest timer hold the process open.
  if (this.harvesterHandle.unref) this.harvesterHandle.unref()
}
/**
* `agent_enabled` changed. This will generally only happen because of a high
* security mode mismatch between the agent and the collector. This only
* expects to have to stop the agent. No provisions have been made, nor
* testing have been done to make sure it is safe to start the agent back up.
*/
/**
 * `agent_enabled` changed. This will generally only happen because of a high
 * security mode mismatch between the agent and the collector. This only
 * expects to have to stop the agent. No provisions have been made, nor
 * testing have been done to make sure it is safe to start the agent back up.
 */
Agent.prototype._enabledChange = function _enabledChange() {
  if (this.config.agent_enabled !== false) return
  logger.warn('agent_enabled has been changed to false, stopping the agent.')
  this.stop(function nop() {})
}
/**
* Report new settings to collector after a configuration has changed. This
* always occurs after handling a response from a connect call.
*/
Agent.prototype._configChange = function _configChange() {
// Fire-and-forget: reportSettings handles its own errors/logging.
this.collector.reportSettings()
}
/**
* To develop the current transaction tracer, I created a tracing tracer that
* tracks when transactions, segments and function calls are proxied. This is
* used by the tests, but can also be dumped and logged, and is useful for
* figuring out where in the execution chain tracing is breaking down.
*
* @param object config Agent configuration.
*
* @returns Tracer Either a debugging or production transaction tracer.
*/
/**
 * Instantiate the transaction tracer bound to this agent.
 *
 * The require lives here (not at the top of the file) so the tracer module
 * is only loaded when an agent is actually constructed.
 *
 * @returns {Tracer} The transaction tracer.
 */
Agent.prototype._setupTracer = function _setupTracer() {
  var TracerClass = require('./transaction/tracer')
  return new TracerClass(this)
}
/**
* The pieces of supportability metrics are scattered all over the place -- only
* send supportability metrics if they're explicitly enabled in the
* configuration.
*
* @param {Function} callback Gets any delivery errors.
*/
Agent.prototype._sendMetrics = function _sendMetrics(callback) {
var agent = this
if (this.collector.isConnected()) {
// Fold aggregate error counts into the metrics before sending.
if (this.errors.getTotalErrorCount() > 0) {
var count = this.errors.getTotalErrorCount()
this.metrics.getOrCreateMetric(NAMES.ERRORS.ALL).incrementCallCount(count)
count = this.errors.getWebTransactionsErrorCount()
this.metrics.getOrCreateMetric(NAMES.ERRORS.WEB).incrementCallCount(count)
count = this.errors.getBackgroundTransactionsErrorCount()
this.metrics.getOrCreateMetric(NAMES.ERRORS.OTHER).incrementCallCount(count)
}
if (this.config.debug.supportability) {
this.metrics.merge(this.config.debug.supportability)
}
// Send uninstrumented supportability metrics every harvest cycle
uninstrumented.createMetrics(this.metrics)
// Snapshot the event pools here so their seen/sent/dropped metrics land in
// the same payload as the events that _sendCustomEvents/_sendErrorEvents
// will deliver later in this harvest.
this._processCustomEvents()
this._processErrorEvents()
// wait to check until all the standard stuff has been added
if (this.metrics.toJSON().length < 1) {
logger.debug("No metrics to send.")
return process.nextTick(callback)
}
var metrics = this.metrics
// The collector wants timestamps in seconds, not milliseconds.
var beginSeconds = metrics.started * FROM_MILLIS
var endSeconds = Date.now() * FROM_MILLIS
var payload = [this.config.run_id, beginSeconds, endSeconds, metrics]
// reset now to avoid losing metrics that come in after delivery starts
this.metrics = new Metrics(
this.config.apdex_t,
this.mapper,
this.metricNameNormalizer
)
this.collector.metricData(payload, function cb_metricData(error, rules) {
// On failure, fold the snapshot back so the data goes out next harvest.
if (error) agent.metrics.merge(metrics)
if (rules) agent.mapper.load(rules)
callback(error)
})
} else {
process.nextTick(function cb_nextTick() {
callback(new Error("not connected to New Relic (metrics will be held)"))
})
}
}
/**
* This function takes the custom events reservoir, gets stats on it for
* metric purposes, then instantiates a new custom events reservoir. This is
* so the stats are consistent with what actually gets pushed by the later
* call to _sendCustomEvents.
*/
Agent.prototype._processCustomEvents = function _processCustomEvents() {
// Snapshot the reservoir; _sendCustomEvents delivers this pool later.
this.customEventsPool = this.customEvents.toArray()
// Create the metrics so they are at least set to 0
var dropped = this.metrics.getOrCreateMetric(NAMES.CUSTOM_EVENTS.DROPPED)
var seen = this.metrics.getOrCreateMetric(NAMES.CUSTOM_EVENTS.SEEN)
var sent = this.metrics.getOrCreateMetric(NAMES.CUSTOM_EVENTS.SENT)
// Bail out if there are no events
if (this.customEventsPool.length === 0) {
return
}
if (this.config.custom_insights_events.enabled) {
// Record their values: overflow() is how many events the reservoir had to
// drop, so sent = seen - dropped.
var diff = this.customEvents.overflow()
dropped.incrementCallCount(diff)
seen.incrementCallCount(this.customEvents.seen)
sent.incrementCallCount(this.customEvents.seen - diff)
// Log any warnings about dropping events
if (diff) {
logger.warn('Dropped %s custom events out of %s.', diff, this.customEvents.seen)
}
// Create a new reservoir now (instead of at send time) so metrics match
// what we actually send.
this.customEvents = new Reservoir(
this.config.custom_insights_events.max_samples_stored
)
} else if (this.customEventsPool.length > 0) {
// We have events and custom events are disabled. Clear everything out so we
// don't hold onto memory that we shouldn't. Only time this could happen is
// if the server sent down settings disabling custom events in the middle of
// a harvest cycle.
this.customEventsPool = []
this.customEvents = new Reservoir(
this.config.custom_insights_events.max_samples_stored
)
}
}
/**
* This function takes the error events reservoir, gets stats on it for
* metric purposes, then instantiates a new error events reservoir. This is
* so the stats are consistent with what actually gets pushed by the later
* call to _sendErrorEvents.
*/
Agent.prototype._processErrorEvents = function _processErrorEvents() {
var events = this.errors.getEvents()
// Snapshot [reservoir limit, events seen, events] — this exact tuple shape
// is what _sendErrorEvents unpacks when it builds the payload.
this._lastErrorEvents = [
this.errors.getEventsLimit(),
this.errors.getEventsSeen(),
events
]
// Create the metrics so they are at least set to 0
var seen = this.metrics.getOrCreateMetric(NAMES.TRANSACTION_ERROR.SEEN)
var sent = this.metrics.getOrCreateMetric(NAMES.TRANSACTION_ERROR.SENT)
// Bail out if there are no events
if (events.length === 0) {
return
}
if (this.config.error_collector.capture_events) {
// Record their values: overflow() is the count of events the reservoir
// dropped, so sent = seen - dropped.
var diff = this.errors.events.overflow()
seen.incrementCallCount(this.errors.events.seen)
sent.incrementCallCount(this.errors.events.seen - diff)
// Log any warnings about dropping events
if (diff) {
logger.warn('Dropped %s error events out of %s.', diff, this.errors.events.seen)
}
// clear the reservoir now (instead of at send time) so metrics match
// what we actually send.
this.errors.clearEvents()
} else if (events.length > 0) {
// We have events and error events are disabled. Clear everything out so we
// don't hold onto memory that we shouldn't. Only time this could happen is
// if the server sent down settings disabling error events in the middle of
// a harvest cycle.
this._lastErrorEvents = []
this.errors.clearEvents()
}
}
/**
* The error tracer doesn't know about the agent, and the connection
* doesn't know about the error tracer. Only the agent knows about both.
*
* @param {Function} callback Gets any delivery errors.
*/
Agent.prototype._sendErrors = function _sendErrors(callback) {
var agent = this
if (this.config.collect_errors && this.config.error_collector.enabled) {
if (!this.collector.isConnected()) {
return process.nextTick(function cb_nextTick() {
callback(new Error("not connected to New Relic (errors will be held)"))
})
} else if (this.errors.getTotalErrorCount() < 1) {
logger.debug("No errors to send.")
return process.nextTick(callback)
}
var errors = this.errors.getErrors()
var payload = [this.config.run_id, errors]
// reset now to avoid losing errors that come in after delivery starts
this.errors.clearErrors()
this.collector.errorData(payload, function cb_errorData(error) {
// Failed delivery: merge the snapshot back for the next harvest.
if (error) agent.errors.merge(errors)
callback(error)
})
} else {
/**
 * Reset the errors object even if collection is disabled due to error
 * counting. Also covers the case where the error collector gets disabled
 * in the middle of a harvest cycle so the agent doesn't continue to hold
 * on to the errors it had collected during the harvest cycle so far.
 */
this.errors.clearErrors()
process.nextTick(callback)
}
}
/**
* The trace aggregator has its own harvester, which is already
* asynchronous, due to its need to compress the nested transaction
* trace data.
*
* @param {Function} callback Gets any encoding or delivery errors.
*/
Agent.prototype._sendTrace = function _sendTrace(callback) {
var agent = this
if (this.config.collect_traces && this.config.transaction_tracer.enabled) {
if (!this.collector.isConnected()) {
return process.nextTick(function cb_nextTick() {
callback(new Error("not connected to New Relic (slow trace data will be held)"))
})
}
// NOTE(review): `trace` is handed back alongside the encoded `traces` and
// passed to traces.reset() on success — presumably the raw trace used to
// seed the next cycle; confirm against transaction/trace/aggregator.js.
this.traces.harvest(function cb_harvest(error, traces, trace) {
if (error || !traces || traces.length === 0) return callback(error)
var payload = [agent.config.run_id, traces]
agent.collector.transactionSampleData(
payload,
function cb_transactionSampleData(error) {
// Only clear the aggregator when delivery succeeded.
if (!error) agent.traces.reset(trace)
callback(error)
}
)
})
} else {
process.nextTick(callback)
}
}
/**
 * Deliver the transaction (analytics) events reservoir to the collector.
 * On oversized payloads (HTTP 413) the sample is dropped; on other errors
 * the sample is re-added to a fresh, larger reservoir for the next harvest.
 *
 * @param {Function} callback Gets any delivery error.
 */
Agent.prototype._sendEvents = function _sendEvents(callback) {
if (this.config.transaction_events.enabled) {
var agent = this
var events = agent.events
var sample = events.toArray()
var run_id = agent.config.run_id
// bail if there are no events
if (sample.length < 1) {
return process.nextTick(callback)
}
var metrics = {
reservoir_size: events.limit,
events_seen: events.seen
}
var payload = [
run_id,
metrics,
sample
]
// clear events
agent.events = new Reservoir(agent.config.transaction_events.max_samples_per_minute)
// send data to collector
agent.collector.analyticsEvents(payload, function cb_analyticsEvents(err) {
if (err && err.statusCode === 413 ) {
logger.warn('request too large; event data dropped')
} else if (err) {
logger.warn('analytics events failed to send; re-sampling')
// boost the limit if a connection fails
// and re-aggregate on failure
// (max_samples_stored, not max_samples_per_minute, is deliberate here)
var newlimit = agent.config.transaction_events.max_samples_stored
agent.events.limit = newlimit
for (var k = 0; k < sample.length; k++) agent.events.add(sample[k])
} else {
// if we had to limit events and sample them, emit a warning
var diff = events.overflow()
if (diff > 0) logger.warn(
'analytics event overflow, dropped %d events; ' +
'try increasing your limit above %d',
diff, events.limit
)
}
callback(err)
})
} else {
process.nextTick(callback)
}
}
/**
* This is separate from _sendEvents because of potential post size problems.
* _processCustomEvents needs to happen before _sendCustomEvents. In the
* normal case it will have happened in _sendMetrics but if you are testing
* this or trying to use it directly for some reason you'll need to call
* _processCustomEvents first.
*/
Agent.prototype._sendCustomEvents = function _sendCustomEvents(callback) {
// Must be enabled and actually have events to send, otherwise bail and nextTick
// (customEventsPool is the snapshot taken earlier by _processCustomEvents).
if (this.config.custom_insights_events.enabled && this.customEventsPool.length > 0) {
var agent = this
var run_id = agent.config.run_id
var payload = [
run_id,
agent.customEventsPool
]
// send data to collector
agent.collector.customEvents(payload, function cb_customEvents(err) {
if (err && err.statusCode === 413 ) {
// Payload too big: record the drop, don't retry.
var tooLarge = agent.metrics.getOrCreateMetric(NAMES.CUSTOM_EVENTS.TOO_LARGE)
tooLarge.incrementCallCount()
logger.warn('request too large; custom event data dropped')
} else if (err) {
// Any other failure: record it and feed the events back into the
// (fresh) reservoir so they get another chance next harvest.
var failed = agent.metrics.getOrCreateMetric(NAMES.CUSTOM_EVENTS.FAILED)
failed.incrementCallCount()
logger.warn('custom events failed to send; re-sampling')
for (var i = 0; i < agent.customEventsPool.length; i++) {
agent.customEvents.add(agent.customEventsPool[i])
}
}
callback(err)
})
} else {
process.nextTick(callback)
}
}
/**
 * Deliver collected slow-query data to the collector. The active tracer is
 * swapped out immediately so queries that arrive during delivery aren't
 * lost; on any failure the captured snapshot is merged back for retry.
 *
 * @param {Function} callback Gets any serialization or delivery error.
 */
Agent.prototype._sendQueries = function _sendQueries(callback) {
  var agent = this
  var queries = this.queries
  this.queries = new QueryTracer(agent.config)
  if (!this.config.slow_sql.enabled) {
    logger.debug('Slow Query is not enabled.')
    return process.nextTick(callback)
  }
  if (Object.keys(queries.samples).length < 1) {
    logger.debug('No queries to send.')
    return process.nextTick(callback)
  }
  queries.prepareJSON(function gotJSON(err, data) {
    if (err) {
      // BUG FIX: this used `this.queries.merge(queries)`, but `this` inside
      // a plain callback is undefined under 'use strict', so the error path
      // threw a TypeError instead of preserving the queries. Merge onto the
      // captured agent reference instead, matching the success-path handler.
      agent.queries.merge(queries)
      logger.debug('Error while serializing query data: %s', err.message)
      return callback(err)
    }
    agent.collector.queryData([data], function handleResponse(error) {
      if (error) agent.queries.merge(queries)
      callback(error)
    })
  })
}
/**
 * Deliver the error-event snapshot captured by _processErrorEvents.
 * _lastErrorEvents is the [limit, seen, events] tuple built there.
 *
 * @param {Function} callback Gets any delivery error.
 */
Agent.prototype._sendErrorEvents = function _sendErrorEvents(callback) {
if (this.config.error_collector.capture_events && this._lastErrorEvents &&
this._lastErrorEvents[2].length > 0) {
var agent = this
var eventsLimit = this._lastErrorEvents[0]
var eventsSeen = this._lastErrorEvents[1]
var events = this._lastErrorEvents[2]
var run_id = agent.config.run_id
// NOTE(review): unreachable — the outer guard already requires
// _lastErrorEvents[2].length > 0; kept for safety.
if (events.length < 1) {
return process.nextTick(callback)
}
var metrics = {
reservoir_size: eventsLimit,
events_seen: eventsSeen
}
var payload = [
run_id,
metrics,
events
]
// send data to collector
agent.collector.errorEvents(payload, function cb_errorEvents(err) {
if (err && err.statusCode === 413 ) {
// Payload too big: drop rather than retry.
logger.warn('request too large; event data dropped')
} else if (err) {
// Otherwise feed the snapshot back into the aggregator for retry.
logger.warn('error events failed to send; re-sampling')
agent.errors.mergeEvents(events)
}
callback(err)
})
} else {
process.nextTick(callback)
}
}
Agent.prototype._addIntrinsicAttrsFromTransaction = _addIntrinsicAttrsFromTransaction
/**
 * Build the intrinsic attribute dictionary for a finished transaction's
 * analytics event: base timing/name/error fields, queue/external/database
 * rollups when present, CAT attributes behind the feature flag, and
 * synthetics identifiers when the transaction carries synthetics data.
 *
 * @param {Transaction} transaction A finished transaction.
 * @returns {object} Intrinsic attributes for the event payload.
 */
function _addIntrinsicAttrsFromTransaction(transaction) {
var intrinsicAttributes = {
// Durations are recorded in milliseconds; events report seconds.
webDuration: transaction.timer.duration / 1000,
timestamp: transaction.timer.start,
name: transaction.name,
duration: transaction.timer.duration / 1000,
type: 'Transaction',
error: transaction.hasErrors()
}
// Optional rollup metrics: only attach when the transaction recorded them.
var metric = transaction.metrics.getMetric(NAMES.QUEUETIME)
if (metric) {
intrinsicAttributes.queueDuration = metric.total
}
metric = transaction.metrics.getMetric(NAMES.EXTERNAL.ALL)
if (metric) {
intrinsicAttributes.externalDuration = metric.total
intrinsicAttributes.externalCallCount = metric.callCount
}
metric = transaction.metrics.getMetric(NAMES.DB.ALL)
if (metric) {
intrinsicAttributes.databaseDuration = metric.total
intrinsicAttributes.databaseCallCount = metric.callCount
}
// FLAG: cat — cross-application-tracing attributes.
if (this.config.feature_flag.cat) {
if (!transaction.invalidIncomingExternalTransaction &&
(
transaction.referringTransactionGuid ||
transaction.includesOutboundRequests()
)
) {
intrinsicAttributes['nr.guid'] = transaction.id
intrinsicAttributes['nr.tripId'] = transaction.tripId || transaction.id
intrinsicAttributes['nr.pathHash'] = hashes.calculatePathHash(
this.config.applications()[0],
transaction.name || transaction.nameState.getName(),
transaction.referringPathHash
)
if (transaction.referringPathHash) {
intrinsicAttributes['nr.referringPathHash'] = transaction.referringPathHash
}
if (transaction.referringTransactionGuid) {
var refId = transaction.referringTransactionGuid
intrinsicAttributes['nr.referringTransactionGuid'] = refId
}
var alternatePathHashes = transaction.alternatePathHashes()
if (alternatePathHashes) {
intrinsicAttributes['nr.alternatePathHashes'] = alternatePathHashes
}
// Apdex zone for web transactions, using a per-transaction apdex_t
// override when one is configured.
if (transaction.webSegment) {
var apdex = (this.config.web_transactions_apdex[transaction.name] ||
this.config.apdex_t)
var duration = transaction.webSegment.getDurationInMillis() / 1000
intrinsicAttributes['nr.apdexPerfZone'] = calculateApdexZone(duration, apdex)
}
}
}
if (transaction.syntheticsData) {
intrinsicAttributes["nr.syntheticsResourceId"] = transaction.syntheticsData.resourceId
intrinsicAttributes["nr.syntheticsJobId"] = transaction.syntheticsData.jobId
intrinsicAttributes["nr.syntheticsMonitorId"] = transaction.syntheticsData.monitorId
}
return intrinsicAttributes
}
/**
 * Classify a duration into its Apdex performance zone.
 *
 * Standard Apdex buckets: satisfied at or under T, tolerating at or under
 * 4T, frustrated beyond that.
 *
 * @param {number} duration Observed duration, in seconds.
 * @param {number} apdexT Apdex tolerating threshold, in seconds.
 * @returns {string} 'S' (satisfied), 'T' (tolerating), or 'F' (frustrated).
 */
function calculateApdexZone(duration, apdexT) {
  return duration <= apdexT ? 'S'
    : duration <= apdexT * 4 ? 'T'
    : 'F'
}
Agent.prototype._addEventFromTransaction = _addEventFromTransaction
/**
 * Add an analytics event for a finished transaction to the events reservoir.
 * Events are [intrinsics, user attributes, agent attributes] triples.
 * No-op when transaction events are disabled.
 *
 * @param {Transaction} transaction A finished transaction.
 */
function _addEventFromTransaction(transaction) {
  if (!this.config.transaction_events.enabled) return
  this.events.add([
    this._addIntrinsicAttrsFromTransaction(transaction),
    transaction.trace.custom,
    transaction.trace.parameters
  ])
}
/**
* Put all the logic for handing finalized transactions off to the tracers and
* metric collections in one place.
*
* @param {Transaction} transaction Newly-finalized transaction.
*/
Agent.prototype._transactionFinished = function _transactionFinished(transaction) {
// only available when this.config.debug.tracer_tracing is true
if (transaction.describer) {
logger.trace({trace_dump: transaction.describer.verbose}, 'Dumped transaction state.')
}
// Allow the API to explicitly set the ignored status on bg-tx.
// This is handled for web-tx when setName is called on the tx.
if (!transaction.isWeb() && transaction.forceIgnore !== null) {
transaction.ignore = transaction.forceIgnore
}
if (!transaction.ignore) {
if (transaction.forceIgnore === false) {
logger.debug("Explicitly not ignoring %s.", transaction.name)
}
// Fold the transaction's data into every aggregator: metrics, errors,
// traces, and the analytics-event reservoir.
this.metrics.merge(transaction.metrics)
this.errors.onTransactionFinished(transaction, this.metrics)
this.traces.add(transaction)
var trace = transaction.trace
trace.intrinsics = transaction.getIntrinsicAttributes()
this._addEventFromTransaction(transaction)
} else if (transaction.forceIgnore === true) {
logger.debug("Explicitly ignoring %s.", transaction.name)
} else {
logger.debug("Ignoring %s.", transaction.name)
}
// Bookkeeping for the entity stats logged at the start of each harvest.
this.activeTransactions--
this.totalActiveSegments -= transaction.numSegments
this.segmentsClearedInHarvest += transaction.numSegments
}
/**
* Get the current transaction (if there is one) from the tracer.
*
* @returns {Transaction} The current transaction.
*/
Agent.prototype.getTransaction = function getTransaction() {
return this.tracer.getTransaction()
}
// The singleton instantiation of this class is handled by index.js.
module.exports = Agent
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 | 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('./logger.js').child({component: 'aws-info'})
var http = require('http')
var NAMES = require('./metrics/names.js')
var concat = require('concat-stream')
module.exports = fetchAWSInfo
// Reset the memoized metadata so the next fetchAWSInfo call re-queries AWS.
module.exports.clearCache = function clearAWSCache() {
resultDict = null
}
// Memoized result of a successful metadata lookup: {type, id, zone}.
var resultDict
/**
 * Look up AWS instance data (instance type, instance id, availability zone)
 * from the EC2 metadata service, memoizing the result in resultDict.
 *
 * NOTE: the callback receives the result as its FIRST argument — either the
 * {type, id, zone} dictionary, or null when detection is disabled or any of
 * the three lookups fails. It is not (err, result) style.
 *
 * @param {Agent} agent The agent (for config and error metrics).
 * @param {Function} callback Gets the result dictionary or null.
 */
function fetchAWSInfo(agent, callback) {
if (!agent.config.utilization || !agent.config.utilization.detect_aws) {
return callback(null)
}
// Serve the cached answer if a previous run succeeded.
if (resultDict) {
return callback(resultDict)
}
// Indirect through module.exports so tests can stub the query function.
var awsQuery = module.exports._awsQuery
awsQuery('instance-type', agent, function getInstanceType(type) {
if (!type) return callback(null)
awsQuery('instance-id', agent, function getInstanceId(id) {
if (!id) return callback(null)
awsQuery('placement/availability-zone', agent, function getZone(zone) {
if (!zone) return callback(null)
resultDict = {
type: type,
id: id,
zone: zone
}
return callback(resultDict)
})
})
})
}
/**
 * Query a single key from the EC2 instance metadata service.
 *
 * The callback receives the value as a UTF-8 string, or null when the value
 * is invalid, the request times out, errors, or is aborted.
 *
 * BUG FIX: the callback could previously fire more than once — a timeout
 * invoked callback(null) without aborting the request, so a late response
 * or error event invoked it again (cascading into duplicate metadata
 * queries in fetchAWSInfo). It also leaked the agent 'errored'/'stopped'
 * listeners on the timeout and error paths. Both paths now funnel through
 * a once-guarded finish() that also detaches the agent listeners.
 *
 * @param {string} key Metadata key (e.g. 'instance-type').
 * @param {Agent} agent The agent (for error metrics and lifecycle events).
 * @param {Function} callback Gets the string value or null.
 */
module.exports._awsQuery = function awsQuery(key, agent, callback) {
  // Link-local metadata endpoint; only reachable from inside EC2.
  var instanceHost = '169.254.169.254'
  var apiVersion = '2008-02-01'
  var url = ['http:/', instanceHost, apiVersion, 'meta-data', key].join('/')
  var finished = false
  // Single completion point: guards against double invocation and always
  // detaches the agent lifecycle listeners.
  function finish(result) {
    if (finished) return
    finished = true
    agent.removeListener('errored', abortRequest)
    agent.removeListener('stopped', abortRequest)
    callback(result)
  }
  var req = http.get(url, function awsRequest(res) {
    res.pipe(concat(respond))
    function respond(data) {
      var valid = checkResponseString(data)
      if (!valid) {
        var awsError = agent.metrics.getOrCreateMetric(NAMES.UTILIZATION.AWS_ERROR)
        awsError.incrementCallCount()
        logger.debug('Response for attribute ' + key + ': %s'
          , data)
        data = null
      } else {
        data = data.toString('utf8')
      }
      finish(data)
    }
  })
  req.setTimeout(1000, function awsTimeout() {
    logger.debug('Request for attribute %s timed out', key)
    finish(null)
  })
  req.on('error', function awsError(err) {
    logger.debug('Message for attribute %s: %s', key, err.message)
    finish(null)
  })
  // Abort the metadata request if the agent errors or shuts down mid-flight.
  agent.once('errored', abortRequest)
  agent.once('stopped', abortRequest)
  function abortRequest() {
    logger.debug('Aborting request for attribute %s', key)
    req.abort()
    agent.removeListener('errored', abortRequest)
    agent.removeListener('stopped', abortRequest)
  }
}
/**
 * Validate a response body from the AWS metadata service.
 *
 * A value is valid when it is 1-255 units long and every byte is either
 * outside ASCII (> 127, i.e. part of a multi-byte encoding) or one of
 * [0-9a-zA-Z_ ./-]. Designed for Buffers (the concat-stream result), where
 * indexing yields numeric byte values.
 *
 * Previously this returned the last regex match() result (an array or null)
 * instead of a boolean; callers only truthiness-test it, so tightening the
 * return to a strict boolean is backward compatible.
 *
 * @param {Buffer} str Raw response body.
 * @returns {boolean} True when the response looks like valid metadata.
 */
function checkResponseString(str) {
  var validCharacters = /[0-9a-zA-Z_ ./-]/
  var len = str.length
  if (len === 0 || len > 255) return false
  for (var i = 0; i < len; i++) {
    if (str[i] <= 127 && !String.fromCharCode(str[i]).match(validCharacters)) {
      return false
    }
  }
  return true
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 | 1 | 'use strict'
/**
* This file includes all of the configuration variables used by the Node.js
* module. If there's a configurable element of the module and it's not
* described in here, there's been a terrible mistake.
*/
exports.config = {
/**
* Array of application names.
*
* @env NEW_RELIC_APP_NAME
*/
app_name: [],
/**
* The user's license key. Must be set by per-app configuration file.
*
* @env NEW_RELIC_LICENSE_KEY
*/
license_key: '',
/**
* Hostname for the New Relic collector proxy.
*
* You shouldn't need to change this.
*
* @env NEW_RELIC_HOST
*/
host: 'collector.newrelic.com',
/**
* The port on which the collector proxy will be listening.
*
* You shouldn't need to change this.
*
* @env NEW_RELIC_PORT
*/
port: 443,
/**
* Whether or not to use SSL to connect to New Relic's servers.
*
* @env NEW_RELIC_USE_SSL
*/
ssl: true,
/**
* Proxy url
*
* A proxy url can be used in place of setting
* proxy_host, proxy_port, proxy_user, and proxy_pass.
*
* e.g. http://user:pass@host:port/
*
* Setting proxy will override other proxy settings.
*
* @env NEW_RELIC_PROXY_URL
*/
proxy: '',
/**
* Proxy host to use to connect to the internet.
*
* @env NEW_RELIC_PROXY_HOST
*/
proxy_host: '',
/**
* Proxy port to use to connect to the internet.
*
* @env NEW_RELIC_PROXY_PORT
*/
proxy_port: '',
/**
* Proxy user name when required.
*
* @env NEW_RELIC_PROXY_USER
*/
proxy_user: '',
/**
* Proxy password when required.
*
* @env NEW_RELIC_PROXY_PASS
*/
proxy_pass: '',
/**
* Custom SSL certificates
*
* If your proxy uses a custom SSL certificate, you can add the CA text to
* this array, one entry per certificate.
*
* The easiest way to do this is with `fs.readFileSync` e.g.
*
* certificates: [
* require('fs').readFileSync('custom.crt', 'utf8') // don't forget the utf8
* ]
*
*/
certificates: [],
/**
* You may want more control over how the module is configured and want to
* disallow the use of New Relic's server-side configuration. To do so, set
* this parameter to true. Some configuration information is required to make
* the module work properly with the rest of New Relic, but settings such as
* apdex_t and capture_params will not be override-able by New Relic with this
* setting in effect.
*
* @env NEW_RELIC_IGNORE_SERVER_CONFIGURATION
*/
ignore_server_configuration: false,
/**
* Whether the module is enabled.
*
* @env NEW_RELIC_ENABLED
*/
agent_enabled: true,
/**
* The default Apdex tolerating / threshold value for applications, in
* seconds. The default for Node sets apdex_t to 100 milliseconds, which is
* lower than the New Relic standard, but Node.js applications tend to be
* more latency-sensitive than most.
*
* @env NEW_RELIC_APDEX
*/
apdex_t: 0.100,
/**
* Whether to capture parameters in the request URL in slow transaction
* traces and error traces. Because this can pass sensitive data, it's
* disabled by default. If there are specific parameters you want ignored,
* use ignored_params.
*
* @env NEW_RELIC_CAPTURE_PARAMS
*/
capture_params: false,
/**
* Array of parameters you don't want captured off request URLs in slow
* transaction traces and error traces.
*
* @env NEW_RELIC_IGNORED_PARAMS
*/
ignored_params: [],
logging: {
/**
* Verbosity of the module's logging. This module uses bunyan
* (https://github.com/trentm/node-bunyan) for its logging, and as such the
* valid logging levels are 'fatal', 'error', 'warn', 'info', 'debug' and
* 'trace'. Logging at levels 'info' and higher is very terse. For support
* requests, attaching logs captured at 'trace' level are extremely helpful
* in chasing down bugs.
*
* @env NEW_RELIC_LOG_LEVEL
*/
level: 'info',
/**
* Where to put the log file -- by default just uses process.cwd +
* 'newrelic_agent.log'. A special case is a filepath of 'stdout',
* in which case all logging will go to stdout, or 'stderr', in which
* case all logging will go to stderr.
*
* @env NEW_RELIC_LOG
*/
filepath: require('path').join(process.cwd(), 'newrelic_agent.log'),
/**
* Whether to write to a log file at all
*
* @env NEW_RELIC_LOG_ENABLED
*/
enabled: true,
/**
* Enables extra debugging at `warn` level. No need to enable except under
* specific debugging conditions.
*/
diagnostics: false
},
audit_log: {
/**
* Enables logging of out bound traffic from the Agent to the Collector.
* This field is ignored if trace level logging is enabled.
* With trace logging, all traffic is logged.
*
* @env NEW_RELIC_AUDIT_LOG_ENABLED
*/
enabled: false,
/**
* Specify which methods are logged. Used in conjunction with the audit_log flag.
* If audit_log is enabled and this property is empty, all methods will be logged
* Otherwise, if the audit log is enabled, only the methods specified
* in the filter will be logged
* Methods include: error_data, metric_data, and analytic_event_data
*
* @env NEW_RELIC_AUDIT_LOG_ENDPOINTS
*/
endpoints: []
},
/**
* Whether to collect & submit error traces to New Relic.
*
* @env NEW_RELIC_ERROR_COLLECTOR_ENABLED
*/
error_collector: {
/**
* Disabling the error tracer just means that errors aren't collected
* and sent to New Relic -- it DOES NOT remove any instrumentation.
*/
enabled: true,
/**
* List of HTTP error status codes the error tracer should disregard.
* Ignoring a status code means that the transaction is not renamed to
* match the code, and the request is not treated as an error by the error
* collector.
*
* NOTE: This configuration value has no effect on errors recorded using
* `noticeError()`.
*
* Defaults to 404 NOT FOUND.
*
* @env NEW_RELIC_ERROR_COLLECTOR_IGNORE_ERROR_CODES
*/
ignore_status_codes: [404],
/**
* Whether error events are collected.
*/
capture_events: true,
/**
* The agent will collect all error events up to this number per minute.
* If there are more than that, a statistical sampling will be collected.
* Currently this uses a reservoir sampling algorithm.
*
* By increasing this setting you are both increasing the memory
* requirements of the agent as well as increasing the payload to the New
* Relic servers. The memory concerns are something you should consider for
* your own server's sake. The payload of events is compressed, but if it
* grows too large the New Relic servers may reject it.
*/
max_event_samples_stored: 100
},
/**
* Options regarding collecting system information. Used for system
* utilization based pricing scheme.
*/
utilization: {
/**
* This flag dictates whether the agent attempts to reach out to AWS
* to get info about the vm the process is running on.
*
* @env NEW_RELIC_UTILIZATION_DETECT_AWS
*/
detect_aws: true,
/**
* This flag dictates whether the agent attempts to reach out to AWS
* to get info about the container the process is running in.
*
* @env NEW_RELIC_UTILIZATION_DETECT_DOCKER
*/
detect_docker: true
},
transaction_tracer: {
/**
* Whether to collect & submit slow transaction traces to New Relic. The
* instrumentation is loaded regardless of this setting, as it's necessary
* to gather metrics. Disable the agent to prevent the instrumentation from
* loading.
*
* @env NEW_RELIC_TRACER_ENABLED
*/
enabled: true,
/**
* The duration above which the slow transaction tracer should collect a
* transaction trace. If set to 'apdex_f', the threshold will be set to
* 4 * apdex_t, which with the default apdex_t value of 100 milliseconds
* will be 400 milliseconds.
*
* If a time is provided, it is set in seconds.
*
* @env NEW_RELIC_TRACER_THRESHOLD
*/
transaction_threshold: 'apdex_f',
/**
* Increase this parameter to increase the diversity of the slow
* transaction traces recorded by your application over time. Confused?
* Read on.
*
* Transactions are named based on the request (see the README for the
* details of how requests are mapped to transactions), and top_n refers to
* the "top n slowest transactions" grouped by these names. The module will
* only replace a recorded trace with a new trace if the new trace is
* slower than the previous slowest trace of that name. The default value
* for this setting is 20, as the transaction trace view page also defaults
* to showing the 20 slowest transactions.
*
* If you want to record the absolute slowest transaction over the last
* minute, set top_n to 0 or 1. This used to be the default, and has a
* problem in that it will allow one very slow route to dominate your slow
* transaction traces.
*
* The module will always record at least 5 different slow transactions in
* the reporting periods after it starts up, and will reset its internal
* slow trace aggregator if no slow transactions have been recorded for the
* last 5 harvest cycles, restarting the aggregation process.
*
* @env NEW_RELIC_TRACER_TOP_N
*/
top_n: 20,
/**
* This option affects both slow-queries and record_sql for transaction
* traces. It can have one of 3 values: 'off', 'obfuscated' or 'raw'
* When it is 'off' no slow queries will be captured, and backtraces
* and sql will not be included in transaction traces. If it is 'raw'
* or 'obfuscated' and other criteria (slow_sql.enabled etc) are met
* for a query. The raw or obfuscated sql will be included in the
* transaction trace and a slow query sample will be collected.
*/
record_sql: 'off',
/**
* This option affects both slow-queries and record_sql for transaction
* traces. This is the minimum duration a query must take (in ms) for it
* to be considered for slow query collection and inclusion in transaction traces.
*/
explain_threshold: 500
},
/**
* Whether to enable internal supportability metrics and diagnostics. You're
* welcome to turn these on, but they will probably be most useful to the
* New Relic node engineering team.
*/
debug: {
/**
* Whether to collect and submit internal supportability metrics alongside
* application performance metrics.
*
* @env NEW_RELIC_DEBUG_METRICS
*/
internal_metrics: false,
/**
* Traces the execution of the transaction tracer. Requires logging.level
* to be set to 'trace' to provide any useful output.
*
* WARNING: The tracer tracing data is likely only to be intelligible to a
* small number of people inside New Relic, so you should probably only
* enable tracer tracing if asked to by New Relic, because it will affect
* performance significantly.
*
* @env NEW_RELIC_DEBUG_TRACER
*/
tracer_tracing: false
},
/**
* Rules for naming or ignoring transactions.
*/
rules: {
/**
* A list of rules of the format {pattern: 'pattern', name: 'name'} for
* matching incoming request URLs and naming the associated New Relic
* transactions. Both pattern and name are required. Additional attributes
* are ignored. Patterns may have capture groups (following JavaScript
* conventions), and names will use $1-style replacement strings. See
* the documentation for addNamingRule for important caveats.
*
* @env NEW_RELIC_NAMING_RULES
*/
name: [],
/**
* A list of patterns for matching incoming request URLs to be ignored by
* the agent. Patterns may be strings or regular expressions.
*
* By default, socket.io long-polling is ignored.
*
* @env NEW_RELIC_IGNORING_RULES
*/
ignore: [
'^\/socket\.io\/.*\/xhr-polling/'
]
},
/**
* By default, any transactions that are not affected by other bits of
* naming logic (the API, rules, or metric normalization rules) will
* have their names set to 'NormalizedUri/*'. Setting this value to
* false will set them instead to Uri/path/to/resource. Don't change
* this setting unless you understand the implications of New Relic's
* metric grouping issues and are confident your application isn't going
* to run afoul of them. Your application could end up getting black holed!
* Nobody wants that.
*
* @env NEW_RELIC_ENFORCE_BACKSTOP
*/
enforce_backstop: true,
/**
* Browser Monitoring
*
* Browser monitoring lets you correlate transactions between the server and browser
* giving you accurate data on how long a page request takes, from request,
* through the server response, up until the actual page render completes.
*/
browser_monitoring: {
/**
* Enable browser monitoring header generation.
*
* This does not auto-instrument, rather it enables the agent to generate headers.
* The newrelic module can generate the appropriate <script> header, but you must
* inject the header yourself, or use a module that does so.
*
* Usage:
*
* var newrelic = require('newrelic');
*
* router.get('/', function (req, res) {
* var header = newrelic.getBrowserTimingHeader();
* res.write(header)
* // write the rest of the page
* });
*
* This generates the <script>...</script> header necessary for Browser Monitoring
* This script must be manually injected into your templates, as high as possible
* in the header, but _after_ any X-UA-COMPATIBLE HTTP-EQUIV meta tags.
* Otherwise you may hurt IE!
*
* This method must be called _during_ a transaction, and must be called every
* time you want to generate the headers.
*
* Do *not* reuse the headers between users, or even between requests.
*
* @env NEW_RELIC_BROWSER_MONITOR_ENABLE
*/
enable: true,
/**
* Request un-minified sources from the server.
*
* @env NEW_RELIC_BROWSER_MONITOR_DEBUG
*/
debug: false
},
/**
* Transaction Events
*
* Transaction events are sent to New Relic Insights. This event data
* includes transaction timing, transaction name, and any custom parameters.
*
* Read more here: http://newrelic.com/insights
*/
transaction_events: {
/**
* If this is disabled, the agent does not collect, nor try to send,
* analytic data.
*/
enabled: true,
/**
* The agent will collect all events up to this number per minute. If
* there are more than that, a statistical sampling will be collected.
*/
max_samples_per_minute: 10000,
/**
* This is used if the agent is unable to send events to the collector.
* The values from the previous harvest cycle will be merged into the next
* one with this option as the limit.
*
* This should be *greater* than max_samples_per_minute or you'll see odd
* behavior. You probably want at least double the value, but more is okay
* as long as you can handle the memory overhead.
*/
max_samples_stored: 20000
},
/**
* Custom Insights Events
*
* Custom insights events are JSON object that are sent to New Relic
* Insights. You can tell the agent to send your custom events via the
* `newrelic.recordCustomEvent()` API. These events are sampled once the max
* reservoir size is reached. You can tune this setting below.
*
* Read more here: http://newrelic.com/insights
*/
custom_insights_events: {
/**
* If this is disabled, the agent does not collect, nor try to send, custom
* event data.
*/
enabled: true,
/**
* The agent will collect all events up to this number per minute. If there
* are more than that, a statistical sampling will be collected. Currently
* this uses a reservoir sampling algorithm.
*
* By increasing this setting you are both increasing the memory
* requirements of the agent as well as increasing the payload to the New
* Relic servers. The memory concerns are something you should consider for
* your own server's sake. The payload of events is compressed, but if it
* grows too large the New Relic servers may reject it.
*/
max_samples_stored: 1000
},
/**
* This is used to configure properties about the user's host name.
*/
process_host: {
/**
* Configurable display name for hosts
*
* @env NEW_RELIC_PROCESS_HOST_DISPLAY_NAME
*/
display_name: '',
/**
* ip address preference when creating hostnames
*
* @env NEW_RELIC_IPV_PREFERENCE
*/
ipv_preference: '4'
},
/**
* High Security
*
* High security mode (v2) is a setting which prevents any sensitive data from
* being sent to New Relic. The local setting must match the server setting.
* If there is a mismatch the agent will log a message and act as if it is
* disabled.
*
* Attributes of high security mode (when enabled):
* * requires SSL
* * does not allow capturing of http params
* * does not allow custom params
*
* To read more see: https://docs.newrelic.com/docs/subscriptions/high-security
*/
high_security: false,
/**
* Labels
*
* An object of label names and values that will be applied to the data sent
* from this agent. Both label names and label values have a maximum length of
* 255 characters. This object should contain at most 64 labels.
*/
labels: {},
/**
* These options control behavior for slow queries, but do not affect sql
* nodes in transaction traces.
* slow_sql.enabled enables and disables slow_sql recording
* slow_sql.max_samples sets the maximum number of slow query samples that
* will be collected in a single harvest cycle.
*/
slow_sql: {
enabled: false,
max_samples: 10
},
/**
* Controls behavior of datastore instance metrics.
*
* @property {bool} [instance_reporting.enabled=true]
* Enables reporting the host and port/path/id of database servers. Default
* is `true`.
*
* @property {bool} [database_name_reporting.enabled=true]
* Enables reporting of database/schema names. Default is `true`.
*/
datastore_tracer: {
instance_reporting: {enabled: true},
database_name_reporting: {enabled: true}
}
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960 961 962 963 964 965 966 967 968 969 970 971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992 993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008 1009 1010 1011 1012 1013 1014 1015 1016 1017 1018 1019 1020 
1021 1022 1023 1024 1025 1026 1027 1028 1029 1030 1031 1032 1033 1034 1035 1036 1037 1038 1039 1040 1041 1042 1043 1044 1045 1046 1047 1048 1049 1050 1051 1052 1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063 1064 1065 1066 1067 1068 1069 1070 1071 1072 1073 1074 1075 1076 1077 1078 1079 1080 1081 1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098 1099 1100 1101 1102 1103 1104 1105 1106 1107 1108 1109 1110 1111 1112 1113 1114 1115 1116 1117 1118 1119 1120 1121 1122 1123 1124 1125 1126 1127 1128 1129 1130 1131 1132 1133 1134 1135 1136 1137 1138 1139 1140 1141 1142 1143 1144 1145 1146 1147 1148 1149 1150 1151 1152 1153 1154 1155 1156 1157 1158 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 1 1 2 2 2 10 10 8 8 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 1 1 1 | 'use strict'
var util = require('util')
var path = require('path')
var fs = require('fs')
var EventEmitter = require('events').EventEmitter
var NAMES = require('./metrics/names.js')
var feature_flag = require('./feature_flags.js')
var flatten = require('./util/flatten')
var hashes = require('./util/hashes')
var exists = fs.existsSync || path.existsSync
var safeJSON = require('./util/safe-json')
var stringifySync = safeJSON.stringifySync
var parse = safeJSON.parse
var os = require('os')
var logger
/**
 * CONSTANTS -- we gotta lotta 'em
 */
var DEFAULT_CONFIG_PATH = path.join(__dirname, 'config.default.js')
var DEFAULT_CONFIG = require(DEFAULT_CONFIG_PATH).config
var DEFAULT_FILENAME = 'newrelic.js'
var AZURE_APP_NAME = 'APP_POOL_ID'
// Candidate directories searched for newrelic.js, in priority order.
var CONFIG_FILE_LOCATIONS = [
  process.env.NEW_RELIC_HOME,
  process.cwd(),
  process.env.HOME,
  path.join(__dirname, '../../..') // above node_modules
]
// the REPL has no main module
// (fixed: the coverage-report artifact `Eif` replaced the `if` keyword)
if (process.mainModule && process.mainModule.filename) {
  CONFIG_FILE_LOCATIONS.splice(2, 0, path.dirname(process.mainModule.filename))
}
/*
 * Maps configuration keys to the environment variables that override them.
 * ENV_MAPPING, LIST_VARS, and BOOLEAN_VARS could probably be unified and
 * objectified, but this flat form is simple and works.
 */
var ENV_MAPPING = {
  newrelic_home: 'NEW_RELIC_HOME',
  app_name: 'NEW_RELIC_APP_NAME',
  license_key: 'NEW_RELIC_LICENSE_KEY',
  ssl: 'NEW_RELIC_USE_SSL',
  host: 'NEW_RELIC_HOST',
  port: 'NEW_RELIC_PORT',
  proxy: 'NEW_RELIC_PROXY_URL',
  proxy_host: 'NEW_RELIC_PROXY_HOST',
  proxy_port: 'NEW_RELIC_PROXY_PORT',
  proxy_user: 'NEW_RELIC_PROXY_USER',
  proxy_pass: 'NEW_RELIC_PROXY_PASS',
  ignore_server_configuration: 'NEW_RELIC_IGNORE_SERVER_CONFIGURATION',
  agent_enabled: 'NEW_RELIC_ENABLED',
  apdex_t: 'NEW_RELIC_APDEX',
  capture_params: 'NEW_RELIC_CAPTURE_PARAMS',
  ignored_params: 'NEW_RELIC_IGNORED_PARAMS',
  logging: {
    level: 'NEW_RELIC_LOG_LEVEL',
    filepath: 'NEW_RELIC_LOG',
    enabled: 'NEW_RELIC_LOG_ENABLED'
  },
  audit_log: {
    enabled: 'NEW_RELIC_AUDIT_LOG_ENABLED',
    endpoints: 'NEW_RELIC_AUDIT_LOG_ENDPOINTS'
  },
  error_collector: {
    enabled: 'NEW_RELIC_ERROR_COLLECTOR_ENABLED',
    ignore_status_codes: 'NEW_RELIC_ERROR_COLLECTOR_IGNORE_ERROR_CODES'
  },
  transaction_tracer: {
    enabled: 'NEW_RELIC_TRACER_ENABLED',
    transaction_threshold: 'NEW_RELIC_TRACER_THRESHOLD',
    top_n: 'NEW_RELIC_TRACER_TOP_N',
    record_sql: 'NEW_RELIC_RECORD_SQL',
    explain_threshold: 'NEW_RELIC_EXPLAIN_THRESHOLD'
  },
  utilization: {
    detect_aws: 'NEW_RELIC_UTILIZATION_DETECT_AWS',
    detect_docker: 'NEW_RELIC_UTILIZATION_DETECT_DOCKER',
    logical_processors: 'NEW_RELIC_UTILIZATION_LOGICAL_PROCESSORS',
    total_ram_mib: 'NEW_RELIC_UTILIZATION_TOTAL_RAM_MIB',
    billing_hostname: 'NEW_RELIC_UTILIZATION_BILLING_HOSTNAME'
  },
  debug: {
    internal_metrics: 'NEW_RELIC_DEBUG_METRICS',
    tracer_tracing: 'NEW_RELIC_DEBUG_TRACER'
  },
  rules: {
    name: 'NEW_RELIC_NAMING_RULES',
    ignore: 'NEW_RELIC_IGNORING_RULES'
  },
  enforce_backstop: 'NEW_RELIC_ENFORCE_BACKSTOP',
  browser_monitoring: {
    enable: 'NEW_RELIC_BROWSER_MONITOR_ENABLE',
    debug: 'NEW_RELIC_BROWSER_MONITOR_DEBUG'
  },
  high_security: 'NEW_RELIC_HIGH_SECURITY',
  labels: 'NEW_RELIC_LABELS',
  slow_sql: {
    enabled: 'NEW_RELIC_SLOW_SQL_ENABLED',
    max_samples: 'NEW_RELIC_MAX_SQL_SAMPLES'
  },
  process_host: {
    display_name: 'NEW_RELIC_PROCESS_HOST_DISPLAY_NAME',
    ipv_preference: 'NEW_RELIC_IPV_PREFERENCE'
  },
  datastore_tracer: {
    instance_reporting: {
      enabled: 'NEW_RELIC_DATASTORE_INSTANCE_REPORTING_ENABLED'
    },
    database_name_reporting: {
      enabled: 'NEW_RELIC_DATASTORE_DATABASE_NAME_REPORTING_ENABLED'
    }
  }
}
// values in list variables are comma-delimited lists
var LIST_VARS = [
  'NEW_RELIC_APP_NAME',
  'NEW_RELIC_IGNORED_PARAMS',
  'NEW_RELIC_ERROR_COLLECTOR_IGNORE_ERROR_CODES',
  'NEW_RELIC_IGNORING_RULES',
  'NEW_RELIC_AUDIT_LOG_ENDPOINTS'
]
// values in object lists are comma-delimited object literals
var OBJECT_LIST_VARS = [
  'NEW_RELIC_NAMING_RULES'
]
// settings whose sub-keys are user-defined rather than a fixed schema
var HAS_ARBITRARY_KEYS = [
  'labels'
]
/*
 * Boolean environment variables. Parsing is tolerant of several spellings,
 * but don't get fancy -- just use 'true' and 'false', everybody.
 */
var BOOLEAN_VARS = [
  'NEW_RELIC_IGNORE_SERVER_CONFIGURATION',
  'NEW_RELIC_ENABLED',
  'NEW_RELIC_CAPTURE_PARAMS',
  'NEW_RELIC_ERROR_COLLECTOR_ENABLED',
  'NEW_RELIC_TRACER_ENABLED',
  'NEW_RELIC_DEBUG_METRICS',
  'NEW_RELIC_DEBUG_TRACER',
  'NEW_RELIC_ENFORCE_BACKSTOP',
  'NEW_RELIC_USE_SSL',
  'NEW_RELIC_BROWSER_MONITOR_ENABLE',
  'NEW_RELIC_BROWSER_MONITOR_DEBUG',
  'NEW_RELIC_HIGH_SECURITY',
  'NEW_RELIC_SLOW_SQL_ENABLED',
  'NEW_RELIC_LOG_ENABLED',
  'NEW_RELIC_AUDIT_LOG_ENABLED',
  'NEW_RELIC_DATASTORE_DATABASE_NAME_REPORTING_ENABLED',
  'NEW_RELIC_DATASTORE_INSTANCE_REPORTING_ENABLED'
]
// values parsed as floating-point numbers
var FLOAT_VARS = [
  'NEW_RELIC_APDEX'
]
// values parsed as integers
var INT_VARS = [
  'NEW_RELIC_EXPLAIN_THRESHOLD',
  'NEW_RELIC_MAX_SQL_SAMPLES'
]
// Config keys that can't be set by the server if high_security === true
var HIGH_SECURITY_SETTINGS = {
  ssl: true,
  capture_params: false,
  transaction_tracer: {
    record_sql: 'off'
  },
  slow_sql: {
    enabled: false
  }
}
var HIGH_SECURITY_KEYS = Object.keys(flatten({}, '', HIGH_SECURITY_SETTINGS))
// blank out these config values before sending to the collector
var REDACT_BEFORE_SEND = ['proxy_pass', 'proxy_user', 'proxy']
// process.domain needs to be stripped before sending
var REMOVE_BEFORE_SEND = ['domain']
// lazily-created singleton Config instance
var _configInstance = null
/**
 * Loosely parses a setting (typically an environment-variable string) as a
 * boolean. null/undefined and the common "off" spellings ('false', 'f',
 * 'no', 'n', 'disabled', '0' -- case-insensitive) are false; everything
 * else is true. (Fixed: the coverage-report artifact `Eif` had replaced
 * the `if` keyword on the guard clause.)
 *
 * @param {*} setting Raw value to interpret.
 * @returns {boolean}
 */
function isTruthular(setting) {
  if (setting === undefined || setting === null) return false
  var normalized = setting.toString().toLowerCase()
  switch (normalized) {
    case 'false':
    case 'f':
    case 'no':
    case 'n':
    case 'disabled':
    case '0':
      return false
    default:
      return true
  }
}
/**
 * Parses a comma-delimited list of object literals (as found in an
 * environment variable) into an array of objects by wrapping it in
 * brackets and JSON-parsing the result. Returns undefined (after logging)
 * when the input is not valid JSON.
 *
 * @param {string} setting Comma-delimited object literals.
 * @returns {object[]|undefined}
 */
function fromObjectList(setting) {
  try {
    return JSON.parse('[' + setting + ']')
  } catch (err) {
    logger.error("New Relic configurator could not deserialize object list:")
    logger.error(err.stack)
  }
}
/**
 * Walks CONFIG_FILE_LOCATIONS in priority order and returns the real path
 * of the first directory containing newrelic.js, or undefined when none
 * does. (Fixed: the coverage-report artifact `Eif` had replaced the `if`
 * keyword on the existence check.)
 *
 * @returns {string|undefined} Resolved path to the config file, if found.
 */
function _findConfigFile() {
  var candidate
  var filepath
  for (var i = 0; i < CONFIG_FILE_LOCATIONS.length; i++) {
    candidate = CONFIG_FILE_LOCATIONS[i]
    // entries derived from unset environment variables are undefined
    if (!candidate) continue
    filepath = path.join(path.resolve(candidate), DEFAULT_FILENAME)
    if (!exists(filepath)) continue
    return fs.realpathSync(filepath)
  }
}
/**
 * The agent configuration object. Settings are layered in this order:
 * defaults -> passed/loaded config -> special environment values ->
 * NEW_RELIC_* environment variables -> canonicalization -> high-security
 * overrides. Inherits from EventEmitter; the collector handshake can later
 * mutate it via onConnect()/_fromServer().
 *
 * @param {object} config Settings loaded from the config file or passed by
 *   the caller. NOTE: may be mutated here (port forced to 80 when SSL is
 *   explicitly disabled and no port was supplied).
 */
function Config(config) {
  EventEmitter.call(this)
  // 1. start by cloning the defaults
  try {
    var basis = JSON.parse(stringifySync(DEFAULT_CONFIG))
    Object.keys(basis).forEach(function cb_forEach(key) {
      this[key] = basis[key]
    }, this)
  } catch (err) {
    logger.warn('Unable to clone the default config, %s: %s', DEFAULT_CONFIG_PATH, err)
  }
  // SSL explicitly disabled with no port chosen anywhere: fall back to
  // plain HTTP's port 80. NOTE(review): only the literal env string
  // 'false' triggers this, unlike the looser isTruthular parsing -- confirm
  // that is intentional.
  if (config &&
      (process.env[ENV_MAPPING.ssl] === 'false' || config.ssl === false) &&
      process.env[ENV_MAPPING.port] === undefined && config.port === undefined ) {
    config.port = 80
  }
  // 2. initialize undocumented, internal-only default values
  // feature flags are mostly private settings for gating unreleased features
  // flags are set in the feature_flags.js file
  this.feature_flag = feature_flag.prerelease
  // set by environment
  this.newrelic_home = null
  // set by configuration file loader
  this.config_file_path = null
  // set by collector on handshake
  this.run_id = null
  this.application_id = null
  this.web_transactions_apdex = {}
  this.cross_process_id = null
  this.encoding_key = null
  this.obfuscatedId = null
  this.trusted_account_ids = null
  // how frequently harvester runs
  this.data_report_period = 60
  // this value is arbitrary
  this.max_trace_segments = 900
  // feature level of this account
  this.product_level = 0
  // product-level related
  this.collect_traces = true
  this.collect_errors = true
  // override options for utilization stats
  this.utilization.logical_processors = null
  this.utilization.total_ram_mib = null
  this.utilization.billing_hostname = null
  this.browser_monitoring.loader = 'rum'
  this.browser_monitoring.loader_version = ''
  // Settings to play nice with DLPs (see NODE-1044).
  this.compressed_content_encoding = "deflate" // Deflate or gzip
  this.simple_compression = false // Disables subcomponent compression
  this.put_for_data_send = false // Changes http verb for harvest
  // 3. override defaults with values from the loaded / passed configuration
  this._fromPassed(config)
  // 3.5. special values (only Azure environment APP_POOL_ID for now)
  this._fromSpecial()
  // 4. override config with environment variables
  this._fromEnvironment()
  // 5. clean up anything that requires postprocessing
  this._canonicalize()
  // 6. put the version in the config
  this.version = require('../package.json').version
  // 7. apply high security overrides
  if (this.high_security === true) {
    this._applyHighSecurity()
  }
}
util.inherits(Config, EventEmitter)
/**
 * Because this module and logger depend on each other, the logger needs
 * a way to inject the actual logger instance once it's constructed.
 * It's kind of a Rube Goldberg device, but it works.
 *
 * @param {Logger} bootstrapped The actual, configured logger.
 */
Config.prototype.setLogger = function setLogger(bootstrapped) {
  // replaces the module-scoped logger used throughout this file
  logger = bootstrapped
}
/**
 * Accept any configuration passed back from the server. Will log all
 * recognized, unsupported, and unknown parameters. Some may not be set,
 * depending on the setting of ignore_server_configuration.
 *
 * @param {object} json The config blob sent by New Relic.
 * @param {boolean} recursion True when called recursively for the nested
 *   'agent_config' blob, which skips the high-security handshake check.
 */
Config.prototype.onConnect = function onConnect(json, recursion) {
  var incoming = json || {}
  // if we are in high-security mode but the collector is not, disable
  // the agent entirely rather than run with mismatched security settings
  var securityMismatch =
    this.high_security === true &&
    recursion !== true &&
    incoming.high_security !== true
  if (securityMismatch) {
    this.agent_enabled = false
    this.emit('agent_enabled', false)
    return
  }
  var keys = Object.keys(incoming)
  if (keys.length === 0) return
  var self = this
  keys.forEach(function applyServerKey(key) {
    self._fromServer(incoming, key)
  })
  this.emit('change', this)
}
/**
 * The guts of the logic about how to deal with server-side configuration.
 *
 * Dispatches each key sent by the collector to the appropriate update
 * strategy: always-accept, accept-unless-locally-disabled, emit-only,
 * disable-only, unsupported, or unknown.
 *
 * @param {object} params A configuration dictionary.
 * @param {string} key The particular configuration parameter to set.
 */
Config.prototype._fromServer = function _fromServer(params, key) {
  switch (key) {
    // handled by the connection
    case 'messages':
      break
    // *sigh* Xzibit, etc.
    case 'agent_config':
      // the collector nests another config blob under this key; recurse
      // with recursion=true so the high-security check is skipped
      this.onConnect(params[key], true)
      break
    // if it's undefined or null, so be it
    case 'agent_run_id':
      this.run_id = params.agent_run_id
      break
    // handled by config.onConnect
    case 'high_security':
      break
    // always accept these settings
    case 'cross_process_id':
    case 'encoding_key':
      this._alwaysUpdateIfChanged(params, key)
      // once both halves are known, precompute the obfuscated id used
      // for cross-application tracing headers
      if (this.cross_process_id && this.encoding_key) {
        this.obfuscatedId = hashes.obfuscateNameUsingKey(this.cross_process_id,
          this.encoding_key)
      }
      break
    // always accept these settings
    case 'collect_traces':
    case 'collect_errors':
    case 'product_level':
    case 'application_id':
    case 'trusted_account_ids':
      this._alwaysUpdateIfChanged(params, key)
      break
    case 'collect_error_events':
      // the server may disable error event collection, but never enable it
      if (params.collect_error_events === false) {
        this._updateNestedIfChanged(
          params,
          this.error_collector,
          key,
          'capture_events'
        )
      }
      break
    // also accept these settings
    case 'url_rules':
    case 'metric_name_rules':
    case 'transaction_name_rules':
    case 'transaction_segment_terms':
      // rules are consumed by listeners, not stored on the config itself
      this._emitIfSet(params, key)
      break
    // setting these can be disabled by ignore_server_configuration
    case 'ssl':
    case 'apdex_t':
    case 'web_transactions_apdex':
    case 'data_report_period':
    case 'ignored_params':
      this._updateIfChanged(params, key)
      break
    case 'transaction_tracer.enabled':
      this._updateNestedIfChanged(
        params,
        this.transaction_tracer,
        'transaction_tracer.enabled',
        'enabled'
      )
      break
    case 'transaction_tracer.transaction_threshold':
      this._updateNestedIfChanged(
        params,
        this.transaction_tracer,
        'transaction_tracer.transaction_threshold',
        'transaction_threshold'
      )
      break
    case 'error_collector.enabled':
      this._updateNestedIfChanged(
        params,
        this.error_collector,
        'error_collector.enabled',
        'enabled'
      )
      break
    case 'error_collector.ignore_status_codes':
      this._updateNestedIfChanged(
        params,
        this.error_collector,
        'error_collector.ignore_status_codes',
        'ignore_status_codes'
      )
      // re-normalize: status codes may arrive as strings
      this._canonicalize()
      break
    case 'error_collector.capture_events':
      this._updateNestedIfChanged(
        params,
        this.error_collector,
        'error_collector.capture_events',
        'capture_events'
      )
      break
    case 'error_collector.max_event_samples_stored':
      this._updateNestedIfChanged(
        params,
        this.error_collector,
        'error_collector.max_event_samples_stored',
        'max_event_samples_stored'
      )
      break
    case 'collect_analytics_events':
      // never enable from server-side
      // but we allow the server to disable
      if (params.collect_analytics_events === false)
        this.transaction_events.enabled = false
      break
    case 'collect_custom_events':
      // never enable from server-side
      // but we allow the server to disable
      if (params.collect_custom_events === false)
        this.custom_insights_events.enabled = false
      break
    case 'transaction_events.max_samples_stored':
      this._updateNestedIfChanged(
        params,
        this.transaction_events,
        key,
        'max_samples_stored'
      )
      break
    case 'transaction_events.max_samples_per_minute':
      this._updateNestedIfChanged(
        params,
        this.transaction_events,
        key,
        'max_samples_per_minute'
      )
      break
    case 'transaction_events.enabled':
      this._updateNestedIfChanged(
        params,
        this.transaction_events,
        key,
        'enabled'
      )
      break
    // these are used by browser_monitoring
    // and the api.getRUMHeader() method
    case 'js_agent_file':
    case 'js_agent_loader_file':
    case 'beacon':
    case 'error_beacon':
    case 'browser_key':
    case 'js_agent_loader':
      // Raw variant: these must be accepted even when server-side
      // configuration is otherwise ignored
      this._updateNestedIfChangedRaw(
        params,
        this.browser_monitoring,
        key,
        key
      )
      break
    case 'browser_monitoring.loader':
      this._updateNestedIfChangedRaw(
        params,
        this.browser_monitoring,
        key,
        'loader'
      )
      break
    // After 2015-02, the collector no longer supports the capture_params setting.
    case 'capture_params':
      break
    // these settings aren't supported by the agent (yet)
    case 'sampling_rate':
    case 'episodes_file':
    case 'episodes_url':
    case 'cross_application_tracing':
    case 'transaction_tracer.record_sql':
    case 'slow_sql.enabled':
    case 'rum.load_episodes_file':
      this.logUnsupported(params, key)
      break
    default:
      this.logUnknown(params, key)
  }
}
/**
 * Change a value sent by the collector if and only if it's different from the
 * value we already have. Emit an event with the key name and the new value,
 * and log that the value has changed. Arrays are merged element-wise rather
 * than replaced.
 *
 * @param {object} json Config blob sent by collector.
 * @param {string} key Value we're looking to set.
 */
Config.prototype._alwaysUpdateIfChanged = function _alwaysUpdateIfChanged(json, key) {
  var incoming = json[key]
  if (incoming === null || incoming === undefined) return
  if (this[key] === incoming) return
  if (Array.isArray(incoming) && Array.isArray(this[key])) {
    // merge arrays: append only elements not already present
    var existing = this[key]
    incoming.forEach(function mergeElement(element) {
      if (existing.indexOf(element) === -1) existing.push(element)
    })
  } else {
    this[key] = incoming
  }
  this.emit(key, incoming)
  logger.debug("Configuration of %s was changed to %s by New Relic.", key, incoming)
}
/**
 * Change a value sent by the collector if and only if it's different from the
 * value we already have. Emit an event with the key name and the new value,
 * and log that the value has changed. Parameter will be ignored if
 * ignore_server_configuration is set.
 *
 * @param {object} json Config blob sent by collector.
 * @param {string} key Value we're looking to set.
 */
Config.prototype._updateIfChanged = function _updateIfChanged(json, key) {
  // a top-level setting is just the nested case with local === this
  this._updateNestedIfChanged(json, this, key, key)
}
/**
 * Some parameter values are nested, need a simple way to change them as well.
 * Will merge local and remote if and only if both are arrays. The update is
 * refused (and logged) when ignore_server_configuration is set, or when the
 * key is protected by high-security mode.
 *
 * @param {object} remote JSON sent from New Relic.
 * @param {object} local A portion of this configuration object.
 * @param {string} remoteKey The name sent by New Relic.
 * @param {string} localKey The local name.
 */
Config.prototype._updateNestedIfChanged = _updateNestedIfChanged
function _updateNestedIfChanged(remote, local, remoteKey, localKey) {
  var locallyDisabled = this.ignore_server_configuration
  var highSecurityLocked =
    this.high_security && HIGH_SECURITY_KEYS.indexOf(localKey) !== -1
  if (locallyDisabled || highSecurityLocked) {
    return this.logDisabled(remote, remoteKey)
  }
  return this._updateNestedIfChangedRaw(remote, local, remoteKey, localKey)
}
/**
 * Unconditionally apply a (possibly nested) value from the collector when it
 * differs from the local value. Arrays are merged element-wise; everything
 * else is replaced. Emits the remote key name with the new value.
 *
 * @param {object} remote JSON sent from New Relic.
 * @param {object} local A portion of this configuration object.
 * @param {string} remoteKey The name sent by New Relic.
 * @param {string} localKey The local name.
 */
Config.prototype._updateNestedIfChangedRaw = function _updateNestedIfChangedRaw(
  remote, local, remoteKey, localKey) {
  var incoming = remote[remoteKey]
  if (incoming === null || incoming === undefined) return
  if (local[localKey] === incoming) return
  if (Array.isArray(incoming) && Array.isArray(local[localKey])) {
    // merge arrays: append only elements not already present
    var target = local[localKey]
    incoming.forEach(function mergeElement(element) {
      if (target.indexOf(element) === -1) target.push(element)
    })
  } else {
    local[localKey] = incoming
  }
  this.emit(remoteKey, incoming)
  logger.debug("Configuration of %s was changed to %s by New Relic.", remoteKey, incoming)
}
/**
 * Some parameter values are just to be passed on to listeners rather than
 * stored on the config object.
 *
 * @param {object} json Config blob sent by collector.
 * @param {string} key Value we're looking to set.
 */
Config.prototype._emitIfSet = function _emitIfSet(json, key) {
  var value = json[key]
  if (value === null || value === undefined) return
  this.emit(key, value)
}
/**
 * The agent would normally do something with this parameter, but server-side
 * configuration is disabled via ignore_server_configuration (or the key is
 * protected by high-security mode). Log the refusal at debug level.
 *
 * @param {object} json Config blob sent by collector.
 * @param {string} key Value the agent won't set.
 */
Config.prototype.logDisabled = function logDisabled(json, key) {
  var sent = json[key]
  if (sent === null || sent === undefined) return
  logger.debug(
    "Server-side configuration of %s is currently disabled by local configuration. " +
    "(Server sent value of %s.)",
    key,
    sent
  )
}
/**
 * Help support out by putting in the logs the fact that we don't currently
 * support the provided configuration key, and including the sent value.
 * Still emits the key so interested listeners can observe it.
 *
 * @param {object} json Config blob sent by collector.
 * @param {string} key Value the agent doesn't set.
 */
Config.prototype.logUnsupported = function logUnsupported(json, key) {
  var sent = json[key]
  if (sent === null || sent === undefined) return
  var flavor = this.ignore_server_configuration
    ? "ignored"
    : "not supported by the Node.js agent"
  logger.debug(
    "Server-side configuration of %s is currently %s. (Server sent value of %s.)",
    key,
    flavor,
    sent
  )
  this.emit(key, sent)
}
/**
 * The agent knows nothing about this parameter; just record what arrived.
 *
 * @param {object} json Config blob sent by collector.
 * @param {string} key Value the agent knows nothing about.
 */
Config.prototype.logUnknown = function logUnknown(json, key) {
  logger.debug(
    "New Relic sent unknown configuration parameter %s with value %s.",
    key,
    json[key]
  )
}
/**
 * Gets the user set host display name. If not provided, it returns the default value.
 *
 * This function is written in this strange way because of the use of caching variables.
 * I wanted to cache the DisplayHost, but if I attached the variable to the config object,
 * it sends the extra variable to New Relic, which is not desired.
 *
 * @return {string} display host name
 */
Config.prototype.getDisplayHost = getDisplayHost
// Resets the memoized display host; the next getDisplayHost() recomputes it.
Config.prototype.clearDisplayHostCache = function clearDisplayHostCache() {
  this.getDisplayHost = getDisplayHost
}
/**
 * Compute (and memoize on the instance) the host display name. Falls back
 * to the safe hostname when no display name is configured or when the
 * configured name exceeds 255 UTF-8 bytes.
 *
 * @return {string} display host name
 */
function getDisplayHost() {
  var _displayHost
  // memoize: replace the method on this instance so later calls are free
  this.getDisplayHost = function getCachedDisplayHost() {
    return _displayHost
  }
  var configured = this.process_host.display_name
  if (configured === '') {
    _displayHost = this.getHostnameSafe()
    return _displayHost
  }
  // Buffer.byteLength measures the UTF-8 size without allocating a Buffer;
  // the previous `new Buffer(...)` constructor is deprecated in modern Node.
  var numBytes = Buffer.byteLength(configured, 'utf8')
  if (numBytes > 255) {
    logger.warn('Custom host display name must be less than 255 bytes')
    _displayHost = this.getHostnameSafe()
    return _displayHost
  }
  _displayHost = configured
  return _displayHost
}
/**
 * Gets the system's host name. If that fails, it just returns ipv4/6 based on the user's
 * process_host.ipv_preference setting.
 *
 * This function is written in this strange way because of the use of caching variables.
 * I wanted to cache the Hostname, but if I attached the variable to the config object,
 * it sends the extra variable to New Relic, which is not desired.
 *
 * @return {string} host name
 */
Config.prototype.getHostnameSafe = getHostnameSafe
// Resets the memoized hostname; the next getHostnameSafe() recomputes it.
Config.prototype.clearHostnameCache = function clearHostnameCache() {
  this.getHostnameSafe = getHostnameSafe
}
/**
 * Collect one address per family ('ipv4'/'ipv6') from the machine's
 * non-loopback network interfaces. Later interfaces overwrite earlier
 * ones for the same family.
 *
 * @return {object} map of lowercased address family to address string
 */
Config.prototype.getIPAddresses = function getIPAddresses() {
  var addresses = {}
  var interfaces = os.networkInterfaces()
  Object.keys(interfaces).forEach(function eachInterface(name) {
    // skip loopback devices (lo, lo0, ...)
    if (/^lo/.test(name)) return
    interfaces[name].forEach(function eachDescription(description) {
      addresses[description.family.toLowerCase()] = description.address
    })
  })
  return addresses
}
/**
 * Look up (and memoize on the instance) the machine's hostname. When
 * os.hostname() throws, fall back to an IP address chosen by the user's
 * process_host.ipv_preference setting, or 'UNKNOWN_BOX' as a last resort.
 *
 * @return {string} host name or fallback address
 */
function getHostnameSafe() {
  var _hostname
  // memoize: replace the method on this instance so later calls are free
  this.getHostnameSafe = function getCachedHostname() {
    return _hostname
  }
  try {
    _hostname = os.hostname()
  } catch (e) {
    var addresses = this.getIPAddresses()
    if (this.process_host.ipv_preference === '6' && addresses.ipv6) {
      _hostname = addresses.ipv6
    } else if (addresses.ipv4) {
      logger.info('Defaulting to ipv4 address for host name')
      _hostname = addresses.ipv4
    } else if (addresses.ipv6) {
      logger.info('Defaulting to ipv6 address for host name')
      _hostname = addresses.ipv6
    } else {
      logger.info('No hostname, ipv4, or ipv6 address found for machine')
      _hostname = 'UNKNOWN_BOX'
    }
  }
  return _hostname
}
/**
 * Ensure that the app names are always returned as a list, regardless of
 * whether app_name was configured as an array, a string, or not at all.
 *
 * @return {Array.<string>} the configured application names (possibly empty)
 */
Config.prototype.applications = function applications() {
  var names = this.app_name
  if (Array.isArray(names)) {
    return names.length > 0 ? names : []
  }
  if (typeof names === 'string' && names) {
    return [names]
  }
  return []
}
/**
 * Safely overwrite defaults with values passed to constructor. Recurses
 * into nested objects; keys absent from the defaults are dropped unless
 * the containing section allows arbitrary keys.
 *
 * @param {object} external The configuration being loaded.
 * @param {object} internal Whichever chunk of the config being overridden.
 * @param {boolean} arbitrary Whether keys missing from the defaults may
 *   still be copied at this level.
 */
Config.prototype._fromPassed = function _fromPassed(external, internal, arbitrary) {
  if (!external) return
  if (!internal) internal = this
  Object.keys(external).forEach(function cb_forEach(key) {
    // if it's not in the defaults, it doesn't exist
    if (!arbitrary && internal[key] === undefined) return
    try {
      // getters on user-supplied config objects can throw; guard the read
      var node = external[key]
    } catch (err) {
      logger.warn('Error thrown on access of user config for key: %s', key)
      return
    }
    if (Array.isArray(node)) {
      internal[key] = node
    } else if (typeof node === 'object') {
      // is top level and can have arbitrary keys
      if (internal === this && HAS_ARBITRARY_KEYS.indexOf(key) !== -1) {
        this._fromPassed(node, internal[key], true)
      } else {
        this._fromPassed(node, internal[key], false)
      }
    } else {
      internal[key] = node
    }
  }, this)
}
/**
 * Some values should be picked up only if they're not otherwise set, like
 * the Windows / Azure application name. Don't set it if there's already
 * a non-empty value set via the configuration file, and allow these
 * values to be overwritten by environment variables. Just saves a step for
 * PaaS users who don't want to have multiple settings for a single piece
 * of configuration.
 */
Config.prototype._fromSpecial = function _fromSpecial() {
  var name = this.app_name
  var unset =
    name === null ||
    name === undefined ||
    name === '' ||
    (Array.isArray(name) && name.length === 0)
  if (!unset) return
  var azureName = process.env[AZURE_APP_NAME]
  if (azureName) this.app_name = azureName.split(',')
}
/**
 * Recursively visit the nodes of the constant containing the mapping between
 * environment variable names, overriding any configuration values that are
 * found in the environment. Operates purely via side effects.
 *
 * @param {object} metadata The current level of the mapping object. Should never
 *   need to set this yourself.
 * @param {object} data The current level of the configuration object. Should
 *   never need to set this yourself.
 */
Config.prototype._fromEnvironment = function _fromEnvironment(metadata, data) {
  if (!metadata) metadata = ENV_MAPPING
  if (!data) data = this
  Object.keys(metadata).forEach(function cb_forEach(value) {
    // if it's not in the config, it doesn't exist
    if (data[value] === undefined) return
    var node = metadata[value]
    if (typeof node === 'string') {
      // leaf: node is the environment variable name for this setting
      var setting = process.env[node]
      if (setting) {
        // coerce the string from the environment based on which typed
        // variable list the name appears in
        if (LIST_VARS.indexOf(node) > -1) {
          data[value] = setting.split(',').map(function cb_map(k) {
            return k.trim()
          })
        } else if (OBJECT_LIST_VARS.indexOf(node) > -1) {
          data[value] = fromObjectList(setting)
        } else if (BOOLEAN_VARS.indexOf(node) > -1) {
          data[value] = isTruthular(setting)
        } else if (FLOAT_VARS.indexOf(node) > -1) {
          // (parseFloat takes no radix; the extra argument is ignored)
          data[value] = parseFloat(setting, 10)
        } else if (INT_VARS.indexOf(node) > -1) {
          data[value] = parseInt(setting, 10)
        } else {
          data[value] = setting
        }
      }
    } else {
      // interior node: recurse into the nested mapping
      // don't crash if the mapping has config keys the current config doesn't.
      if (!data[value]) data[value] = {}
      this._fromEnvironment(node, data[value])
    }
  }, this)
}
/**
 * Depending on how the status codes are set, they could be strings, which
 * makes strict equality testing / indexOf fail. To keep things cheap, parse
 * them once, after configuration has finished loading. Other one-off shims
 * based on special properties of configuration values should go here as well.
 */
Config.prototype._canonicalize = function _canonicalize() {
  var collector = this.error_collector
  var codes = collector && collector.ignore_status_codes
  if (codes) {
    // normalize every configured status code to an integer
    collector.ignore_status_codes = codes.map(function toInteger(code) {
      return parseInt(code, 10)
    })
  }
  // map common log-level aliases onto the canonical bunyan names
  var logAliases = {
    'verbose': 'trace',
    'debugging': 'debug',
    'warning': 'warn',
    'err': 'error'
  }
  var level = this.logging.level
  if (logAliases[level]) {
    this.logging.level = logAliases[level]
  } else {
    this.logging.level = level
  }
}
/**
 * This goes through the settings that high security mode needs and coerces
 * them to be correct, emitting an event (with the fully-qualified dotted
 * key) for every value that had to be changed.
 */
Config.prototype._applyHighSecurity = function _applyHighSecurity() {
  var config = this
  checkNode('', this, HIGH_SECURITY_SETTINGS)
  // walk one level of the required-settings tree
  function checkNode(base, target, settings) {
    Object.keys(settings).forEach(checkKey.bind(null, base, target, settings))
  }
  // coerce a single key, recursing when the required value is an object
  function checkKey(base, target, settings, key) {
    var hsValue = settings[key]
    if (hsValue && typeof hsValue === 'object') {
      if (typeof target[key] !== 'object') {
        logger.warn(
          'High Security Mode: %s should be an object, found %s',
          key,
          target[key]
        )
        target[key] = {}
      }
      return checkNode(base + key + '.', target[key], hsValue)
    }
    if (target[key] !== hsValue) {
      logger.warn('High Security Mode: %s was set to %s, coercing to %s',
                  key, target[key], hsValue)
      target[key] = hsValue
      config.emit(base + key, hsValue)
    }
  }
}
/**
 * The agent will use the supportability metrics object if it's
 * available.
 *
 * @param {string} suffix Supportability metric name.
 * @param {number} duration Milliseconds that the measured operation took.
 */
Config.prototype.measureInternal = function measureInternal(suffix, duration) {
  var internal = this.debug.supportability
  if (!internal) return
  internal.measureMilliseconds(NAMES.SUPPORTABILITY.PREFIX + suffix, null, duration)
}
/**
 * Warn about any configured feature flags that have since been released
 * (the flag is now a no-op) or deprecated.
 */
Config.prototype.validateFlags = function validateFlags() {
  var flags = Object.keys(this.feature_flag)
  for (var i = 0; i < flags.length; i++) {
    var flag = flags[i]
    if (feature_flag.released.indexOf(flag) > -1) {
      logger.warn('Feature flag ' + flag + ' has been released')
    }
    if (feature_flag.unreleased.indexOf(flag) > -1) {
      logger.warn('Feature flag ' + flag + ' has been deprecated')
    }
  }
}
/**
 * Get a JSONifiable object containing all settings we want to report to the
 * collector and store in the environment_values table. Sensitive keys are
 * redacted ('****') and internal-only keys are removed entirely.
 *
 * @return Object containing simple key-value pairs of settings
 */
Config.prototype.publicSettings = function publicSettings() {
  var settings = {}
  for (var key in this) {
    if (this.hasOwnProperty(key)) {
      var item = this[key]
      // report that sensitive values are set without exposing them
      if (REDACT_BEFORE_SEND.indexOf(key) > -1) {
        item = '****'
      }
      if (REMOVE_BEFORE_SEND.indexOf(key) === -1) {
        settings[key] = item
      }
    }
  }
  // Agent-side setting is 'enable', but collector-side setting is
  // 'auto_instrument'. Send both values up.
  // NOTE(review): settings.browser_monitoring is a *reference* to
  // this.browser_monitoring, so this write also adds auto_instrument to the
  // live config object — confirm that mutation is intended.
  settings.browser_monitoring.auto_instrument = settings.browser_monitoring.enable
  // Remove simple circular references
  // (safe-json's parse invokes this callback synchronously, so `settings`
  // is already replaced by the flattened copy before we return)
  parse(stringifySync(settings), function cb_parse(err, settingsCopy) {
    if (err === null) {
      settings = flatten({}, '', settingsCopy)
    } else {
      logger.warn('Error while creating deep copy: %s', err)
    }
  })
  return settings
}
/**
 * Create a configuration, either from a configuration file or the node
 * process's environment.
 *
 * For configuration via file, check these directories, in order, for a
 * file named 'newrelic.js':
 *
 *   1. The process's current working directory at startup.
 *   2. The same directory as the process's main module (i.e. the filename
 *      passed to node on the command line).
 *   3. The directory pointed to by the environment variable NEW_RELIC_HOME.
 *   4. The current process's HOME directory.
 *   5. If this module is installed as a dependency, the directory above the
 *      node_modules folder in which newrelic is installed.
 *
 * For configuration via environment (useful on Joyent, Azure, Heroku, or
 * other PaaS offerings), set NEW_RELIC_NO_CONFIG_FILE to something truthy
 * and read README.md for details on what configuration variables are
 * necessary, as well as a complete enumeration of the other available
 * variables.
 *
 * @param {object} config Optional configuration to be used in place of a
 *   config file.
 * @return {Config|null} the constructed configuration, or null when no
 *   config file could be located.
 * @throws {Error} when the located configuration file cannot be loaded.
 */
function initialize(config) {
  /* When the logger is required here, it bootstraps itself and then
   * injects itself into this module's closure via setLogger on the
   * instance of the logger it creates.
   */
  logger = require('./logger.js')
  if (config) return new Config(config)
  if (isTruthular(process.env.NEW_RELIC_NO_CONFIG_FILE)) {
    config = new Config({})
    // newrelic_home only makes sense for file-based configuration
    if (config.newrelic_home) delete config.newrelic_home
    return config
  }
  var filepath = _findConfigFile()
  if (!filepath) {
    _noConfigFile()
    return null
  }
  try {
    config = new Config(require(filepath).config)
    config.config_file_path = filepath
    logger.debug("Using configuration file %s.", filepath)
    config.validateFlags()
    return config
  } catch (error) {
    logger.error(error)
    throw new Error(
      "Unable to read configuration file " + filepath + ". A default\n" +
      "configuration file can be copied from " + DEFAULT_CONFIG_PATH + "\n" +
      "and renamed to 'newrelic.js' in the directory from which you'll be starting\n" +
      "your application."
    )
  }
}
/**
 * Print an actionable message to stderr describing where a configuration
 * file may be placed when none could be found.
 */
function _noConfigFile() {
  var mainpath = path.resolve(path.join(process.cwd(), DEFAULT_FILENAME))
  var altpath = path.resolve(
    path.dirname(process.mainModule.filename),
    DEFAULT_FILENAME
  )
  var locations
  if (mainpath !== altpath) {
    locations = mainpath + " or\n" + altpath
  } else {
    locations = mainpath
  }
  /* eslint-disable no-console */
  console.error(
    "Unable to find New Relic module configuration. A default\n" +
    "configuration file can be copied from " + DEFAULT_CONFIG_PATH + "\n" +
    "and put at " + locations + ". If you are not using file based config\n" +
    "please set the environment variable NEW_RELIC_NO_CONFIG_FILE=true"
  )
  /* eslint-enable no-console */
}
/**
 * This function honors the singleton nature of this module while allowing
 * consumers to just request an instance without having to worry if one was
 * already created.
 *
 * @return {Config|null} the shared configuration instance.
 */
function getOrCreateInstance() {
  if (_configInstance === null) {
    _configInstance = initialize()
  }
  return _configInstance
}
/**
 * Preserve the legacy initializer, but also allow consumers to manage their
 * own configuration if they choose.
 */
Config.initialize = initialize
Config.getOrCreateInstance = getOrCreateInstance
module.exports = Config
'use strict'
var path = require('path')
var fs = require('fs')
var os = require('os')
var logger = require('../lib/logger').child({component: 'environment'})
var stringifySync = require('./util/safe-json').stringifySync
// path.existsSync moved to fs in later Node versions; support both
var exists = fs.existsSync || path.existsSync
// As of 1.7.0 you can no longer dynamically link v8
// https://github.com/nodejs/io.js/commit/d726a177ed
// Maps process.config.variables build-time flags to human-readable labels.
var remapping = {
  node_install_npm: "npm installed?",
  node_install_waf: "WAF build system installed?",
  node_use_openssl: "OpenSSL support?",
  node_shared_openssl: "Dynamically linked to OpenSSL?",
  node_shared_v8: "Dynamically linked to V8?",
  node_shared_zlib: "Dynamically linked to Zlib?",
  node_use_dtrace: "DTrace support?",
  node_use_etw: "Event Tracing for Windows (ETW) support?"
}
// module-level store of gathered environment facts: name -> [values]
var settings = {}
/**
 * Fetch all recorded values for the given setting name.
 *
 * @param {string} name - The name of the setting to look for.
 *
 * @return {Array.<string>} The recorded values, or an empty array when
 *   nothing has been recorded under that name.
 */
function getSetting(name) {
  var recorded = settings[name]
  if (!recorded) return []
  return recorded
}
/**
 * Add a setting to the module's shared settings object, de-duplicating
 * repeated values under the same name.
 *
 * @param {string} name - The name of the setting value being added.
 * @param {string} value - The value to add for the setting.
 */
function addSetting(name, value) {
  if (!settings[name]) {
    settings[name] = [value]
  } else if (settings[name].indexOf(value) === -1) {
    // only record each distinct value once per name
    settings[name].push(value)
  }
}
/**
 * Remove settings with the given name.
 *
 * @param {string} name - The name of the setting to remove.
 */
function clearSetting(name) {
  delete settings[name]
}
/**
 * Build up a list of top-level packages available to an application relative
 * to the provided root. Each entry is a [name, version] pair; packages whose
 * package.json cannot be read are recorded with version '<unknown>'.
 *
 * @param {string} root - Path to start listing packages from.
 * @param {Array} [packages] - Array to append found packages to.
 *
 * @return {Array} List of packages.
 */
function listPackages(root, packages) {
  if (!packages) {
    packages = []
  }
  try {
    fs.readdirSync(root).forEach(function forEachReadDirSync(dir) {
      // Skip npm's binary directory where it stores executables.
      if (dir === '.bin') {
        return
      }
      var version = null
      try {
        var pck = path.resolve(root, dir, 'package.json')
        version = JSON.parse(fs.readFileSync(pck)).version
      } catch (e) {
        // missing or malformed package.json — record the package anyway
        logger.debug('Could not load %s for environment scan', pck || dir)
      }
      packages.push([dir, version || '<unknown>'])
    })
  } catch (e) {
    logger.trace(e, 'Failed to list packages in %s', root)
  }
  return packages
}
/**
 * Build up a list of dependencies from a given node_modules root by
 * recursively descending into each entry's own node_modules directory.
 *
 * @param {string} root - Path to start listing dependencies from.
 * @param {Array} [children] - Array to append found dependencies to.
 *
 * @return {Array} List of dependencies as [name, version] pairs.
 */
function listDependencies(root, children) {
  if (!children) {
    children = []
  }
  try {
    fs.readdirSync(root).forEach(function forEachReadDirSync(entry) {
      var candidate = path.resolve(root, entry, 'node_modules')
      // Performing this exists check is cheaper than unwinding the stack for
      // all the failed read attempts.
      if (exists(candidate)) {
        listPackages(candidate, children)
        listDependencies(candidate, children)
      }
    })
  } catch (e) {
    logger.trace(e, 'Failed to list dependencies in %s', root)
  }
  return children
}
/**
 * Build up a list of packages, starting from the given directory and
 * walking every ancestor directory's node_modules up to the filesystem
 * root, mirroring Node's own module resolution order.
 *
 * @param {string} start - Root directory to start generation from.
 *
 * @return {Object} Two lists, of packages and dependencies, with the
 *   appropriate names.
 */
function getLocalPackages(start) {
  var found = {packages: [], dependencies: []}
  var dir = start
  while (dir) {
    var modulesDir = path.resolve(dir, 'node_modules')
    listPackages(modulesDir, found.packages)
    listDependencies(modulesDir, found.dependencies)
    var parent = path.dirname(dir)
    // path.dirname of the filesystem root returns itself: we're done
    if (parent === dir) break
    dir = parent
  }
  return found
}
/**
 * Generic method for getting packages and dependencies relative to a
 * provided root directory.
 *
 * @param {string} root - Where to start looking -- doesn't add node_modules.
 *
 * @return {Object} Two lists, of packages and dependencies, with the
 *   appropriate names.
 */
function getPackages(root) {
  var result = {packages: [], dependencies: []}
  listPackages(root, result.packages)
  listDependencies(root, result.dependencies)
  return result
}
/**
 * Generate a list of globally-installed packages, if available / accessible
 * via the environment (derived from the build-time node_prefix).
 *
 * @return {Object} Two lists, of packages and dependencies, with the
 *   appropriate names; both empty when the prefix cannot be determined.
 */
function getGlobalPackages() {
  if (process.config && process.config.variables) {
    var prefix = process.config.variables.node_prefix
    if (prefix) {
      var root = path.resolve(prefix, 'lib', 'node_modules')
      return getPackages(root)
    }
  }
  return {packages: [], dependencies: []}
}
/**
 * Take a list of packages and reduce it to a list of pairs serialized
 * to JSON (to simplify things on the collector end) where each
 * package appears at most once, with all the versions joined into a
 * comma-delimited list.
 *
 * @param {Array.<Array.<string>>} packages - [name, version] pairs.
 * @return {Array.<string>} Sorted list of serialized [name, versions] pairs.
 */
function flattenVersions(packages) {
  // null-prototype map avoids collisions with Object.prototype keys
  var info = Object.create(null)
  packages.forEach(function cb_forEach(pair) {
    var p = pair[0]
    var v = pair[1]
    if (info[p]) {
      // record each distinct version only once per package
      if (info[p].indexOf(v) < 0) info[p].push(v)
    } else {
      info[p] = [v]
    }
  })
  return Object.keys(info)
    .map(function cb_map(key) {
      return [key, info[key].join(', ')]
    })
    .sort()
    .map(function cb_map(pair) {
      return stringifySync(pair)
    })
}
/**
 * There are a bunch of settings generated at build time that are useful to
 * know for troubleshooting purposes. These settings are only available in 0.7
 * and up.
 *
 * This function works entirely via side effects using the
 * addSetting function.
 */
function remapConfigSettings() {
  if (process.config && process.config.variables) {
    var variables = process.config.variables
    Object.keys(variables).forEach(function cb_forEach(key) {
      if (remapping[key]) {
        var value = variables[key]
        // normalize boolean-ish build flags to 'yes'/'no'
        if (value === true || value === 1) value = 'yes'
        if (value === false || value === 0) value = 'no'
        addSetting(remapping[key], value)
      }
    })
  }
}
/**
 * Scrape the list of packages, following the algorithm as described in the
 * node module page:
 *
 * http://nodejs.org/docs/latest/api/modules.html
 *
 * Gathers local packages (walking up from cwd), global packages, NODE_PATH
 * entries, and the legacy home-directory module folders.
 *
 * This function works entirely via side effects using the addSetting
 * function.
 */
function findPackages() {
  var local = getLocalPackages(process.cwd())
  var all = getGlobalPackages()
  var other = {packages: [], dependencies: []}
  if (process.env.NODE_PATH) {
    var paths
    if (process.platform === 'win32') { // why. WHY.
      paths = process.env.NODE_PATH.split(';')
    } else {
      paths = process.env.NODE_PATH.split(':')
    }
    paths.forEach(function cb_forEach(nodePath) {
      if (nodePath[0] !== '/') nodePath = path.resolve(process.cwd(), nodePath)
      var nextSet = getPackages(nodePath)
      other.packages.push.apply(other.packages, nextSet.packages)
      other.dependencies.push.apply(other.dependencies, nextSet.dependencies)
    })
  }
  var packages = local.packages
  packages.push.apply(packages, all.packages)
  packages.push.apply(packages, other.packages)
  var dependencies = local.dependencies
  dependencies.push.apply(dependencies, all.dependencies)
  dependencies.push.apply(dependencies, other.dependencies)
  // legacy ~/.node_modules and ~/.node_libraries locations
  var home
  var homeOld
  if (process.platform === 'win32') {
    if (process.env.USERDIR) {
      home = getPackages(path.resolve(process.env.USERDIR, '.node_modules'))
      homeOld = getPackages(path.resolve(process.env.USERDIR, '.node_libraries'))
    }
  } else if (process.env.HOME) {
    home = getPackages(path.resolve(process.env.HOME, '.node_modules'))
    homeOld = getPackages(path.resolve(process.env.HOME, '.node_libraries'))
  }
  if (home) {
    packages.unshift.apply(packages, home.packages)
    dependencies.unshift.apply(dependencies, home.dependencies)
  }
  if (homeOld) {
    packages.unshift.apply(packages, homeOld.packages)
    dependencies.unshift.apply(dependencies, homeOld.dependencies)
  }
  addSetting('Packages', flattenVersions(packages))
  addSetting('Dependencies', flattenVersions(dependencies))
}
/**
 * In 64-bit SmartOS zones, node <= 0.8.5 crashes when calling os.cpus().
 *
 * @returns {boolean} True only on x64 SunOS running node 0.8.5 or older
 *   (or a runtime so old that process.versions does not exist).
 */
function badOS() {
  // only 64-bit SunOS is affected at all; bail out early everywhere else
  if (os.arch() !== 'x64' || os.type() !== 'SunOS') return false
  if (!process.versions) return true
  var version = process.versions.node.split('.').map(Number)
  // Bad when version <= 0.8.5. The previous check compared minor and patch
  // independently (minor <= 8 && patch <= 5), which wrongly cleared versions
  // such as 0.6.21 (patch > 5 despite being older than 0.8.5).
  return version[0] === 0 &&
    (version[1] < 8 || (version[1] === 8 && version[2] <= 5))
}
/**
 * Capture basic facts about the runtime environment via addSetting.
 */
function gatherEnv() {
  // in 64-bit SmartOS zones, node <= 0.8.5 pukes on os.cpus()
  if (!badOS()) addSetting('Processors', os.cpus().length)
  var facts = [
    ['OS', os.type()],
    ['OS version', os.release()],
    ['Node.js version', process.version],
    ['Architecture', process.arch]
  ]
  facts.forEach(function recordFact(fact) {
    addSetting(fact[0], fact[1])
  })
  if ('NODE_ENV' in process.env) {
    addSetting('NODE_ENV', process.env.NODE_ENV)
  }
}
/**
 * Reset settings and gather them, built to minimally refactor this file.
 * Values set explicitly by consumers (Framework/Dispatcher) and expensive
 * scan results (Packages/Dependencies) survive the reset.
 */
function refresh() {
  // capture persisted settings before wiping the store
  var framework = getSetting('Framework')
  var dispatcher = getSetting('Dispatcher')
  var packages = getSetting('Packages')
  var dependencies = getSetting('Dependencies')
  // `settings` is a module-global; rebuild it from scratch
  settings = {}
  // re-add persisted consumer-provided settings (no-ops on empty arrays)
  framework.forEach(function addFrameworks(fw) {
    addSetting('Framework', fw)
  })
  dispatcher.forEach(function addDispatchers(d) {
    addSetting('Dispatcher', d)
  })
  gatherEnv()
  remapConfigSettings()
  if (packages.length && dependencies.length) {
    // reuse the previous (expensive) package scan results
    settings.Packages = packages
    settings.Dependencies = dependencies
  } else {
    findPackages()
  }
}
// initialize settings
// Synchronously populates the module-global `settings` at require time.
// TODO: Remove this function call and make all environment loading async. At
//       the moment, removing this causes tests to fail and it is unclear if it
//       is an issue in the tests or in the agent.
refresh()
/**
 * Refreshes settings and returns them as an array of [key, value] pairs
 * (one pair per recorded value).
 */
function toJSON() {
  // TODO: Do not refresh when JSON-ifying. This takes a _long_ time and blocks
  //       the event loop. Currently, removing this causes a couple of tests to
  //       fail (ironically from timing out).
  refresh()
  return Object.keys(settings).reduce(function collectPairs(items, key) {
    return items.concat(settings[key].map(function toPair(value) {
      return [key, value]
    }))
  }, [])
}
// Public interface of the environment-settings module. Settings accumulate
// via addSetting/clearSetting; `get` returns the recorded values for a key.
module.exports = {
  // record an additional detected framework (e.g. set by instrumentation)
  setFramework: function setFramework(framework) {
    addSetting('Framework', framework)
  },
  // record an additional detected dispatcher
  setDispatcher: function setDispatcher(dispatcher) {
    addSetting('Dispatcher', dispatcher)
  },
  clearFramework: function clearFramework() {
    clearSetting('Framework')
  },
  clearDispatcher: function clearDispatcher() {
    clearSetting('Dispatcher')
  },
  listPackages: listPackages,
  toJSON: toJSON,
  get: getSetting,
  refresh: refresh
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 | 1 1 1 | 'use strict'
// unreleased flags gating an active feature
// (true = feature enabled by default while still in prerelease)
exports.prerelease = {
  cat: true,
  custom_instrumentation: true,
  custom_metrics: true,
  express5: false,
  synthetics: true,
  express_segments: true,
  native_metrics: true,
  promise_segments: false,
  reverse_naming_rules: true,
  send_request_uri_attribute: false
}
// flags that are no longer used for released features
// (kept so stale user configs referencing them do not warn as unknown)
exports.released = [
  'released',
  'express4',
  'insights',
  'postgres',
  'mysql_pool',
  'proxy',
  'custom_events'
]
// flags that are no longer used for unreleased features
exports.unreleased = [
  'unreleased'
]
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 | 1 2 | 'use strict'
// Return a new copy of this array every time we're called
module.exports = function instrumentations() {
return [
'connect',
'bluebird',
'director',
'express',
'generic-pool',
'hapi',
'memcached',
'mongodb',
'mysql',
'node-cassandra-cql',
'cassandra-driver',
'pg',
'q',
'redis',
'ioredis',
'restify',
'oracle',
'when'
]
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 | 1 1 1 1 1 | 'use strict'
var Logger = require('./util/logger')
var fs = require('fs')
// create bootstrapping logger
module.exports = new Logger({
name: 'newrelic_bootstrap',
stream: process.stdout,
level: 'info'
})
/**
* Don't load config.js until this point, because it requires this
* module, and if it gets loaded too early, module.exports will have no
* value.
*/
var config = require('./config.js').getOrCreateInstance()
Iif (config) {
var options = {
name: 'newrelic',
level: config.logging.level,
enabled: config.logging.enabled
}
// create the "real" logger
module.exports = new Logger(options)
if (config.logging.enabled) {
var stream
switch (config.logging.filepath) {
case 'stdout':
stream = process.stdout
break
case 'stderr':
stream = process.stderr
break
default:
stream = fs.createWriteStream(config.logging.filepath, {flags: 'a+'})
stream.on('error', function logStreamOnError(err) {
/* eslint-disable no-console */
// Since our normal logging didn't work, dump this to stderr.
console.error('New Relic failed to open log file ' + config.logging.filepath)
console.error(err)
/* eslint-enable no-console */
})
}
module.exports.pipe(stream)
}
// now tell the config module to switch to the real logger
config.setLogger(module.exports)
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 | 1 1 1 1 1 | 'use strict'
var logger = require('./logger.js').child({component: 'dockerinfo'})
var NAMES = require('./metrics/names.js')
module.exports = parseDockerInfo
/**
 * Extract a 64-hex-character docker container id from /proc cgroup data.
 * Returns null when docker detection is disabled, the data is unparsable,
 * the cpu subsystem is not in a container cgroup, or the id is malformed.
 */
function parseDockerInfo(agent, data) {
  if (!agent.config.utilization || !agent.config.utilization.detect_docker) return null
  var cpuCgroup = parseCgroupIds(data).cpu
  // if we can't parse the cgroups, or if the cpu is not in a cgroup
  var dockerError = agent.metrics.getOrCreateMetric(NAMES.UTILIZATION.DOCKER_ERROR)
  if (!cpuCgroup) {
    logger.debug('Could not parse cgroup data from: ' + data)
    dockerError.incrementCallCount()
    return null
  }
  // if cpu isn't in a cgroup
  if (cpuCgroup === '/') return null
  var patterns = [
    /^\/docker\/([0-9a-f]+)$/, // docker native driver w/out systemd
    /^\/system\.slice\/docker-([0-9a-f]+)\.scope$/, // with systemd
    /^\/lxc\/([0-9a-f]+)$/ // docker lxc driver
  ]
  for (var i = 0; i < patterns.length; ++i) {
    var matches = cpuCgroup.match(patterns[i])
    if (!matches) continue
    var id = matches[1]
    // container ids are full sha256 digests: exactly 64 hex characters
    if (id.length === 64) return id
    dockerError.incrementCallCount()
    logger.debug('Encountered a malformed docker id: ', id)
    return null
  }
  logger.debug('Unable to recognise cgroup format')
  return null
}
/**
 * Parse /proc/<pid>/cgroup content into a map of subsystem name -> cgroup id.
 * Lines that do not have exactly three ':'-separated fields are ignored.
 */
function parseCgroupIds(cgroupInfo) {
  var cgroupIds = {}
  var lines = cgroupInfo.split('\n')
  for (var i = 0; i < lines.length; ++i) {
    var fields = lines[i].split(':')
    if (fields.length !== 3) continue
    // the second field may name several comma-separated subsystems
    var subsystems = fields[1].split(',')
    for (var j = 0; j < subsystems.length; ++j) {
      cgroupIds[subsystems[j]] = fields[2]
    }
  }
  return cgroupIds
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 | 1 1 1 1 1 1 | 'use strict'
var logger = require('./logger.js').child({component: 'proc-cpuinfo'})
module.exports = parseProcCPUInfo
/**
 * Parse the contents of /proc/cpuinfo into processor counts.
 *
 * @param {string} data - Raw text of /proc/cpuinfo.
 * @returns {object} {logical, cores, packages} — each null when it cannot
 *   be determined from the input.
 */
function parseProcCPUInfo(data) {
  var relevantAttributes = [
    'processor',
    'physical id',
    'cpu cores',
    'core id'
  ]
  var processorStats = {
    logical: null,
    cores: null,
    packages: null
  }
  // separate the processors: one [key, value] pair per line
  var splitData = data.split('\n')
    .map(function formatAttribute(attr) {
      return attr.split(':')
        .map(function eliminateExtraWhitespace(s) {
          // Strip CR/tab characters and runs of 2+ spaces. The previous
          // pattern (/\\r|\\t| {2,}/g) escaped the backslashes, so it only
          // removed literal two-character "\r"/"\t" sequences; the real
          // control characters are matched as well now (the literal
          // sequences are kept for backward compatibility).
          return s.replace(/\r|\t|\\r|\\t| {2,}/g, '').trim()
        })
    })
  var validData = splitData.filter(function checkForValidAttrs(a) {
    return a.length === 2 && relevantAttributes.indexOf(a[0]) !== -1
  })
  if (validData.length === 0) {
    logger.debug('No applicable cpu attributes found')
    return processorStats
  }
  splitData = collapseMultilineValues(splitData)
  var processors = seperateProcessors(splitData)
  processorStats = countProcessorStats(processors)
  if (!processorStats.cores) {
    if (processorStats.logical === 1) {
      // some older, single-core processors might not list ids,
      // so we'll mark them 1
      processorStats.cores = 1
      processorStats.packages = 1
    } else {
      // there is no way of knowing how many packages
      // or cores there are
      processorStats.cores = null
      processorStats.packages = null
    }
  }
  return processorStats
}
// some values are split up over multiple lines, these won't be broken
// by split(':'), and should be folded into the last seen valid value
/**
 * @param {Array<Array<string>>} li - Parsed [key, value] pairs; entries with
 *   length !== 2 are continuation fragments.
 * @returns {Array<Array<string>>} Only the valid pairs, with continuation
 *   fragments appended to the preceding pair's value (mutated in place).
 */
function collapseMultilineValues(li) {
  var tmp = []
  var last
  for (var i = 0; i < li.length; ++i) {
    if (li[i].length === 2) {
      // store the last valid entry to append invalid entries to
      last = li[i]
      tmp.push(last)
    } else if (last) {
      last[1] += li[i][0]
    }
    // a continuation before any valid entry has nothing to attach to; the
    // original crashed here (last undefined) — such fragments are now dropped
  }
  return tmp
}
// walk through the processed list of key, value pairs and populate
// objects till you find a collision
function seperateProcessors(processorData) {
  var processors = []
  var current = {}
  processorData.forEach(function splitOnCollision(pair) {
    var key = pair[0]
    if (current[key] !== undefined) {
      // seeing a key twice means a new processor record has started
      processors.push(current)
      current = {}
    }
    current[key] = pair[1]
  })
  processors.push(current)
  return processors
}
/**
 * Derive logical/core/package counts from per-processor records.
 * Core counts are summed once per distinct physical id (package).
 */
function countProcessorStats(processors) {
  var physIds = []
  var coreCounts = []
  for (var i = 0; i < processors.length; i++) {
    var proc = processors[i]
    var physId = proc['physical id']
    var coreCount = proc['cpu cores']
    // count each package (physical id) only once
    if (physId && coreCount && physIds.indexOf(physId) === -1) {
      physIds.push(physId)
      coreCounts.push(coreCount)
    }
  }
  var totalCores = 0
  for (var j = 0; j < coreCounts.length; j++) {
    totalCores += parseInt(coreCounts[j], 10)
  }
  return {
    logical: processors.length,
    cores: totalCores,
    packages: physIds.length
  }
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 | 1 1 1 | 'use strict'
var logger = require('./logger.js').child({component: 'proc-meminfo'})
module.exports = parseProcMeminfo
/**
 * Extract total memory in MiB from /proc/meminfo content.
 * Returns null when the MemTotal line cannot be parsed.
 */
function parseProcMeminfo(data) {
  // capture the kB figure from the MemTotal line, then convert kB -> MiB
  var totalKb = parseInt(data.replace(/MemTotal:\s*(\d*)\skB/, '$1'), 10)
  if (!totalKb) {
    logger.debug('Unable to parse memory string:', data)
    return null
  }
  return totalKb / 1024
}
|
'use strict'
// from http://en.wikipedia.org/wiki/Reservoir_sampling
function Reservoir(limit) {
  this.limit = limit || 10
  this.seen = 0
  this._data = []
}
// How many offered items exceeded the reservoir's capacity so far.
Reservoir.prototype.overflow = function overflow() {
  var diff = this.seen - this.limit
  return diff >= 0 ? diff : 0
}
// Offer an item: kept outright while under the limit, otherwise it randomly
// replaces an existing entry or is dropped.
Reservoir.prototype.add = function add(item) {
  if (this.seen < this.limit) {
    this._data.push(item)
  } else {
    // Take a number between 0 and n + 1, drop the element at that index
    // from the array. If the element to drop is the (n + 1)th, the new item is
    // not added, otherwise the new item replaces the item that was
    // dropped. This is effectively the same as adding the new element to the
    // end, swapping the last element (the new one) with a random element in
    // the list, then dropping the last element (the potentially swapped one).
    var slot = Math.floor(Math.random() * (this.seen + 2))
    if (slot < this.limit) this._data[slot] = item
  }
  this.seen++
}
Reservoir.prototype.toArray = function toArray() {
  return this._data
}
// Offer every element of `items`; a no-op for empty input or when handed
// the reservoir's own backing array.
Reservoir.prototype.merge = function merge(items) {
  if (!items || !items.length) return
  if (items === this._data) return
  for (var i = 0; i < items.length; i++) {
    this.add(items[i])
  }
}
// Shrink/grow capacity; truncates stored data when the new limit is smaller.
Reservoir.prototype.setLimit = function setLimit(newLimit) {
  this.limit = newLimit
  if (this._data.length > newLimit) {
    this._data = this._data.slice(0, newLimit)
  }
}
module.exports = Reservoir
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var NAMES = require('./metrics/names')
var logger = require('./logger').child({component: 'sampler'})
var Timer = require('./timer')
var os = require('os')
/*
 *
 * CONSTANTS
 *
 */
var MILLIS = 1e3 // milliseconds per second
var MICROS = 1e6 // microseconds per second
var NANOS = 1e9 // nanoseconds per second
var CPUS = os.cpus().length // logical CPU count, fixed at module load
var SAMPLE_INTERVAL = 15 * MILLIS // default sampling period: 15 seconds
var samplers = [] // active Sampler instances; emptied by stop()
/**
 * Wrap a sampling callback in a repeating interval timer that does not keep
 * the process alive.
 */
function Sampler(sampler, interval) {
  this.id = setInterval(sampler, interval)
  // timer.unref only in 0.9+
  if (this.id.unref) {
    this.id.unref()
  }
}
// Cancel this sampler's interval timer.
Sampler.prototype.stop = function stop() {
  clearInterval(this.id)
}
// Stop the timer and report its elapsed time as the event-loop wait metric.
function recordQueueTime(agent, timer) {
  timer.end()
  agent.metrics.measureMilliseconds(NAMES.EVENTS.WAIT, null, timer.getDurationInMillis())
}
/**
 * Build a sampler callback that records process memory usage metrics.
 */
function sampleMemory(agent) {
  return function memorySampler() {
    try {
      var usage = process.memoryUsage()
      var readings = [
        [NAMES.MEMORY.PHYSICAL, usage.rss],
        [NAMES.MEMORY.USED_HEAP, usage.heapUsed],
        [NAMES.MEMORY.MAX_HEAP, usage.heapTotal],
        [NAMES.MEMORY.FREE_HEAP, usage.heapTotal - usage.heapUsed],
        [NAMES.MEMORY.USED_NONHEAP, usage.rss - usage.heapTotal]
      ]
      readings.forEach(function recordReading(reading) {
        agent.metrics.measureBytes(reading[0], reading[1])
      })
      logger.trace('Recorded memory:', usage)
    } catch (e) {
      logger.debug('Could not record memory usage', e)
    }
  }
}
/**
 * Build a sampler callback that measures event-loop queue latency by timing
 * how long a zero-delay setTimeout takes to fire.
 */
function checkEvents(agent) {
  return function eventSampler() {
    var queueTimer = new Timer()
    queueTimer.begin()
    setTimeout(function fireRecord() {
      recordQueueTime(agent, queueTimer)
    }, 0)
  }
}
/**
 * Sample CPU usage (optionally as a diff against a previous sample),
 * returning null if process.cpuUsage is unavailable or throws.
 */
function getCpuSample(lastSample) {
  var sample = null
  try {
    sample = process.cpuUsage(lastSample)
  } catch (e) {
    logger.debug('Could not record cpu usage', e)
  }
  return sample
}
/**
 * Build a recorder that turns raw user/system CPU seconds into time and
 * utilization metrics, tracking elapsed wall time between invocations.
 */
function generateCPUMetricRecorder(agent) {
  var prevSampleTime
  // userTime and sysTime are in seconds
  return function recordCPUMetrics(userTime, sysTime) {
    // first call measures from process start; later calls from the last sample
    var elapsedSeconds = prevSampleTime
      ? (Date.now() - prevSampleTime) / MILLIS
      : process.uptime()
    // total CPU-seconds available across all logical processors
    var availableCpuTime = CPUS * elapsedSeconds
    prevSampleTime = Date.now()
    recordValue(agent, NAMES.CPU.USER_TIME, userTime)
    recordValue(agent, NAMES.CPU.SYSTEM_TIME, sysTime)
    recordValue(agent, NAMES.CPU.USER_UTILIZATION, userTime / availableCpuTime)
    recordValue(agent, NAMES.CPU.SYSTEM_UTILIZATION, sysTime / availableCpuTime)
  }
}
/**
 * Build a sampler callback that records CPU usage via process.cpuUsage.
 */
function sampleCpu(agent) {
  var lastSample
  var recordCPU = generateCPUMetricRecorder(agent)
  return function cpuSampler() {
    // usage since the previous absolute sample (or since process start)
    var cpuSample = getCpuSample(lastSample)
    lastSample = getCpuSample()
    // getCpuSample returns null when process.cpuUsage throws. The previous
    // check only guarded lastSample, then dereferenced cpuSample below —
    // crashing if only the first call failed.
    if (cpuSample == null || lastSample == null) {
      return
    }
    recordCPU(cpuSample.user / MICROS, cpuSample.system / MICROS)
  }
}
/**
 * Record CPU metrics from the native-metrics module's 'usage' events.
 * The returned interval callback does nothing; all work is event-driven.
 */
function sampleCpuNative(agent, nativeMetrics) {
  var recordCPU = generateCPUMetricRecorder(agent)
  nativeMetrics.on('usage', function collectResourceUsage(usage) {
    // ru_utime/ru_stime arrive in milliseconds; the recorder wants seconds
    recordCPU(usage.diff.ru_utime / MILLIS, usage.diff.ru_stime / MILLIS)
  })
  return function cpuSampler() {
    // NOOP?
  }
}
/**
 * Build a sampler callback that reads event-loop usage statistics from the
 * native-metrics module and records them as a complete metric.
 */
function sampleLoop(agent, nativeMetrics) {
  return function loopSampler() {
    var loopMetrics = nativeMetrics.getLoopMetrics()
    var usage = loopMetrics.usage
    // the native module reports microseconds; convert everything to seconds
    usage.min /= MICROS
    usage.max /= MICROS
    usage.total /= MICROS
    usage.sumOfSquares /= (MICROS * MICROS)
    recordCompleteMetric(agent, NAMES.LOOP.USAGE, usage)
  }
}
/**
 * Record per-run GC pause metrics from the native-metrics 'gc' events.
 * The returned interval callback does nothing; all work is event-driven.
 */
function sampleGc(agent, nativeMetrics) {
  nativeMetrics.on('gc', function onGCStatsEvent(stats) {
    // stats.duration is in nanoseconds
    var pauseSeconds = stats.duration / NANOS
    recordValue(agent, NAMES.GC.PAUSE_TIME, pauseSeconds)
    if (!stats.type) {
      logger.debug(stats, 'Unknown GC type %j', stats.typeId)
      return
    }
    recordValue(agent, NAMES.GC.PREFIX + stats.type, pauseSeconds)
  })
  return function gcSampler() {
    // NOOP?
  }
}
// Public sampler interface. `state` is 'stopped' or 'running'; start() is not
// idempotent — callers should stop() before starting again.
var sampler = module.exports = {
  state: 'stopped',
  sampleMemory: sampleMemory,
  checkEvents: checkEvents,
  sampleCpu: sampleCpu,
  sampleGc: sampleGc,
  sampleLoop: sampleLoop,
  nativeMetrics: null,
  /**
   * Start all applicable samplers for the given agent: memory, event-loop
   * wait, and (when available) native GC/loop metrics and CPU usage.
   */
  start: function start(agent) {
    samplers.push(new Sampler(sampleMemory(agent), 5 * MILLIS))
    samplers.push(new Sampler(checkEvents(agent), SAMPLE_INTERVAL))
    var metricFeatureFlag = agent.config.feature_flag.native_metrics
    // This requires a native module which may have failed to build.
    if (!this.nativeMetrics) {
      if (metricFeatureFlag) {
        try {
          this.nativeMetrics = require('@newrelic/native-metrics')({
            timeout: SAMPLE_INTERVAL
          })
        } catch (err) {
          // native module unavailable: log it and bump a supportability metric
          logger.info(
            {error: {message: err.message, stack: err.stack}},
            'Not adding native metric sampler.'
          )
          agent.metrics.getOrCreateMetric(
            NAMES.SUPPORTABILITY.DEPENDENCIES + '/NoNativeMetricsModule'
          ).incrementCallCount()
        }
      } else {
        logger.info('Feature flag for native metrics is false')
      }
    }
    if (this.nativeMetrics) {
      if (!this.nativeMetrics.bound) {
        this.nativeMetrics.bind(SAMPLE_INTERVAL)
      }
      // Add GC events if available.
      if (this.nativeMetrics.gcEnabled) {
        samplers.push(new Sampler(sampleGc(agent, this.nativeMetrics), SAMPLE_INTERVAL))
      }
      // Add loop metrics if available.
      if (this.nativeMetrics.loopEnabled) {
        samplers.push(new Sampler(sampleLoop(agent, this.nativeMetrics), SAMPLE_INTERVAL))
      }
    }
    // Add CPU sampling using the built-in data if available, otherwise pulling
    // from the native module.
    if (process.cpuUsage) { // introduced in 6.1.0
      samplers.push(new Sampler(sampleCpu(agent), SAMPLE_INTERVAL))
    } else if (this.nativeMetrics && this.nativeMetrics.usageEnabled) {
      samplers.push(
        new Sampler(sampleCpuNative(agent, this.nativeMetrics), SAMPLE_INTERVAL)
      )
    } else {
      logger.debug('Not adding CPU metric sampler.')
    }
    sampler.state = 'running'
  },
  /**
   * Stop and discard every active sampler and release the native-metrics
   * module (if it was loaded).
   */
  stop: function stop() {
    samplers.forEach(function forEachSampler(s) {
      s.stop()
    })
    samplers = []
    sampler.state = 'stopped'
    if (this.nativeMetrics) {
      this.nativeMetrics.unbind()
      this.nativeMetrics.removeAllListeners()
      // Setting this.nativeMetrics to null allows us to config a new
      // nativeMetrics object after the first start call.
      this.nativeMetrics = null
    }
  }
}
// Record a single value on the named metric and trace it.
function recordValue(agent, metric, value) {
  agent.metrics.getOrCreateMetric(metric).recordValue(value)
  logger.trace('Recorded metric %s: %j', metric, value)
}
// Merge a fully-populated metric object into the named metric and trace it.
function recordCompleteMetric(agent, metricName, metric) {
  agent.metrics.getOrCreateMetric(metricName).merge(metric)
  logger.trace('Recorded metric %s: %j', metricName, metric)
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 | 1 1 1 1 1 1 1 1 1 | 'use strict'
var path = require('path')
var fs = require('fs')
var logger = require('./logger').child({component: 'shimmer'})
var INSTRUMENTATION = require('./instrumentations')()
/*
*
* CONSTANTS
*
*/
var CORE_INSTRUMENTATION = {
child_process: 'child_process.js',
crypto: 'crypto.js',
domain: 'domain.js',
dns: 'dns.js',
fs: 'fs.js',
http: 'http.js',
https: 'http.js',
net: 'net.js',
timers: 'timers.js',
zlib: 'zlib.js'
}
/**
* Unwrapping is only likely to be used by test code, and is a fairly drastic
* maneuver, but it should be pretty safe if there's a desire to reboot the
* agent in flight.
*
* All of the wrapped methods are tracked in this variable and used by unwrapAll
* below.
*/
var instrumented = []
/**
* All instrumentation files must export the same interface: a single
* initialization function that takes the agent and the module to be
* instrumented.
*/
function instrument(agent, shortName, fileName, nodule, moduleName) {
var fullPath = path.resolve(fileName)
if (!fs.existsSync(fileName)) {
return logger.warn(
'Tried to load instrumentation from %s, but file does not exist',
fullPath
)
}
try {
require(fileName)(agent, nodule, moduleName)
} catch (error) {
logger.warn(
error,
'Failed to instrument module %s using %s',
path.basename(shortName, '.js'),
fullPath
)
}
}
function _postLoad(agent, nodule, name) {
var instrumentation
var base = path.basename(name)
// to allow for instrumenting both 'pg' and 'pg.js'.
if (name === 'pg.js') {
instrumentation = 'pg'
} if (name === 'mysql2') {
// mysql2 (https://github.com/sidorares/node-mysql2) is a drop in replacement for
// mysql which conforms to the existing mysql API. If we see mysql2, treat it as
// mysql
instrumentation = 'mysql'
} else {
instrumentation = base
}
// necessary to prevent instrument() from causing an infinite loop
if (INSTRUMENTATION.indexOf(instrumentation) !== -1) {
logger.trace('Instrumenting %s.', base)
var filename = path.join(__dirname, 'instrumentation', instrumentation + '.js')
instrument(agent, base, filename, nodule)
}
return nodule
}
var shimmer = module.exports = {
/**
* If debug isn't false, the agent will retain references to wrapped methods
* for the entire lifetime of the agent. Some instrumentation depends on
* wrapping functions on individual objects, and this will cause the agent
* to retain references to a large number of dead objects.
*/
debug: false,
/**
* Detects if the given function has already been wrapped.
*
* @param {function} fn - The function to look for a wrapper on.
*
* @return {bool} True if `fn` exists and has an attached original, else false.
*/
isWrapped: function isWrapped(fn) {
return !!(fn && fn.__NR_original)
},
/**
* Don't throw, but do log and bail out if wrapping fails.
*
* Provide an escape hatch by creating a closure around the original method
* and object / module wrapped into a helper function that will restore the
* original function / method. See Sinon for a systematic use of this
* pattern.
*
* @param {object} nodule Class or module containing the function to wrap.
* @param {object} noduleName Human-readable module / Class name. More
* helpful than you'd think.
* @param {string} methods One or more names of methods or functions to extract
* and wrap.
* @param {function} wrapper A generator that, when called, returns a
* wrapped version of the original function.
*/
wrapMethod: function wrapMethod(nodule, noduleName, methods, wrapper) {
if (!methods) {
return logger.warn(new Error(),
"Must include a method name to wrap. Called from:")
}
if (!noduleName) noduleName = '[unknown]'
if (!Array.isArray(methods)) methods = [methods]
methods.forEach(function cb_forEach(method) {
var fqmn = noduleName + '.' + method
if (!nodule) return logger.debug("Can't wrap %s from nonexistent object.",
fqmn)
if (!wrapper) return logger.debug("Can't wrap %s without a wrapper generator.",
fqmn)
var original = nodule[method]
if (!original) return logger.trace("%s not defined, so not wrapping.", fqmn)
if (original.__NR_unwrap) return logger.debug("%s already wrapped by agent.", fqmn)
var wrapped = wrapper(original, method)
wrapped.__NR_original = original
wrapped.__NR_unwrap = function __NR_unwrap() {
nodule[method] = original
logger.trace("Removed instrumentation from %s.", fqmn)
}
nodule[method] = wrapped
if (shimmer.debug) instrumented.push(wrapped)
logger.trace("Instrumented %s.", fqmn)
})
},
/**
* Sometimes you gotta do some crazy stuff to get the job done. Instead of using
* regular monkeypatching, wrapDeprecated allows you to pass in a getter and setter
* and then uses defineProperty to replace the original property with an
* accessor. Note that responsibility for unwrapping is not handled by this
* function.
*
* @param {object} nodule Class or module containing the property to
* wrap.
* @param {object} noduleName Human-readable module / Class name. More
* helpful than you'd think.
* @param {string} property The property to replace with the accessor.
* @param {function} options Optional getter and setter to use for the accessor.
*
* @returns {object} The original value of the property.
*/
wrapDeprecated: function wrapDeprecated(nodule, noduleName, property, options) {
if (!property) {
logger.warn(new Error(), "Must include a function name to wrap. Called from:")
return
}
if (!noduleName) noduleName = '[unknown]'
var fqmn = noduleName + '.' + property
if (!nodule) {
logger.debug("Can't wrap %s from nonexistent object.", fqmn)
return
}
var original = nodule[property]
if (!original) {
logger.trace("%s not defined, so not wrapping.", fqmn)
return
}
delete nodule[property]
var descriptor = {
configurable: true,
enumerable: true
}
if (options.get) descriptor.get = options.get
if (options.set) descriptor.set = options.set
Object.defineProperty(nodule, property, descriptor)
logger.trace("Instrumented %s.", fqmn)
if (shimmer.debug) {
instrumented.push({
__NR_unwrap: function unwrapDeprecated() {
delete nodule[property]
nodule[property] = original
}
})
}
return original
},
unwrapMethod: function unwrapMethod(nodule, noduleName, method) {
if (!noduleName) noduleName = '[unknown]'
if (!method) return logger.debug("Must include a method name to unwrap. " +
"Called from: %s", new Error().stack)
var fqmn = noduleName + '.' + method
if (!nodule) return logger.debug("Can't unwrap %s from nonexistent object.",
fqmn)
var wrapped = nodule[method]
// keep instrumented up to date
var pos = instrumented.indexOf(wrapped)
if (pos !== -1) instrumented.splice(pos, 1)
if (!wrapped) return logger.debug("%s not defined, so not unwrapping.", fqmn)
if (!wrapped.__NR_unwrap) return logger.debug("%s isn't unwrappable.", fqmn)
wrapped.__NR_unwrap()
},
unwrapAll: function unwrapAll() {
instrumented.forEach(function cb_forEach(wrapper) {
wrapper.__NR_unwrap()
})
instrumented = []
},
/**
* Patch the module.load function so that we see modules loading and
* have an opportunity to patch them with instrumentation.
*/
patchModule: function patchModule(agent) {
logger.trace("Wrapping module loader.")
var Module = require('module')
shimmer.wrapMethod(Module, 'Module', '_load', function cb_wrapMethod(load) {
return function cls_wrapMethod(file) {
return _postLoad(agent, load.apply(this, arguments), file)
}
})
},
unpatchModule: function unpatchModule() {
logger.trace("Unwrapping to previous module loader.")
var Module = require('module')
shimmer.unwrapMethod(Module, 'Module', '_load')
},
bootstrapInstrumentation: function bootstrapInstrumentation(agent) {
var globalsFilepath = path.join(__dirname, 'instrumentation', 'core', 'globals.js')
instrument(agent, 'globals', globalsFilepath, global)
Object.keys(CORE_INSTRUMENTATION).forEach(function cb_forEach(mojule) {
var filename = CORE_INSTRUMENTATION[mojule]
var filepath = path.join(__dirname, 'instrumentation/core', filename)
var uninstrumented
try {
uninstrumented = require(mojule)
} catch (err) {
logger.trace(
'Could not load core module %s got error %s',
mojule,
err
)
}
instrument(agent, filename, filepath, uninstrumented, mojule)
})
},
/**
* NOT FOR USE IN PRODUCTION CODE
*
* If an instrumented module has a dependency on another instrumented module,
* and multiple tests are being run in a single test suite with their own
* setup and teardown between tests, it's possible transitive dependencies
* will be unwrapped in the module cache in-place (which needs to happen to
* prevent stale closures from channeling instrumentation data to incorrect
* agents, but which means the transitive dependencies won't get re-wrapped
* the next time the parent module is required).
*
* Since this only applies in test code, it's not worth the drastic
* monkeypatching to Module necessary to walk the list of child modules and
* re-wrap them.
*
* Use this to re-apply any applicable instrumentation.
*/
reinstrument: function reinstrument(agent, modulePath) {
return _postLoad(agent, require(modulePath), modulePath)
}
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var exec = require('child_process').exec
var fetchAWSInfo = require('./aws-info')
var fs = require('fs')
var logger = require('./logger.js').child({component: 'system-info'})
var os = require('os')
var parseCpuInfo = require('./parse-proc-cpuinfo')
var parseDockerInfo = require('./parse-dockerinfo')
var parseMemInfo = require('./parse-proc-meminfo')
var platform = os.platform()
module.exports = fetchSystemInfo
// True only for whole numbers; strict equality also rejects numeric
// strings ('4' !== 4) and fractional values.
function isInteger(i) {
  return parseInt(i, 10) === i
}
/**
 * Gather system facts (CPU, memory, kernel version, docker id, AWS info)
 * in parallel and invoke callback(systemInfo) once all five lookups report.
 * Note: the callback receives a single argument — there is no error
 * parameter; failed lookups simply leave their fields unset.
 */
function fetchSystemInfo(agent, callback) {
  var config = agent.config
  var systemInfo = {
    processorArch: os.arch()
  }
  // validate user-supplied utilization overrides before trusting them
  var utilizationConfig = {}
  if (config.utilization) {
    var configProcessors = config.utilization.logical_processors
    var configRam = config.utilization.total_ram_mib
    var configHostname = config.utilization.billing_hostname
    if (configProcessors) {
      // must parse to a whole number to be accepted
      var parsedConfigProcessors = parseFloat(configProcessors, 10)
      if (!isNaN(parsedConfigProcessors) && isInteger(parsedConfigProcessors)) {
        utilizationConfig.logical_processors = parsedConfigProcessors
      } else {
        logger.info(
          '%s supplied in config for utilization.logical_processors, expected a number',
          configProcessors
        )
      }
    }
    if (configRam) {
      var parsedConfigRam = parseFloat(configRam, 10)
      if (!isNaN(parsedConfigRam) && isInteger(parsedConfigRam)) {
        utilizationConfig.total_ram_mib = parsedConfigRam
      } else {
        logger.info(
          '%s supplied in config for utilization.total_ram_mib, expected a number',
          configRam
        )
      }
    }
    if (configHostname) {
      if (typeof configHostname === 'string') {
        utilizationConfig.hostname = configHostname
      } else {
        logger.info(
          '%s supplied in config for utilization.Hostname, expected a string',
          configHostname
        )
      }
    }
    if (Object.keys(utilizationConfig).length > 0) {
      systemInfo.config = utilizationConfig
    }
  }
  // fan-in counter: the callback fires when all five async tasks complete
  var tasksDone = 0
  var numTasks = 5
  function finishedResponse() {
    if (++tasksDone === numTasks) return callback(systemInfo)
  }
  // tasks are invoked through module.exports so tests can stub them
  module.exports._getProcessorStats(function getProcessCB(processorStats) {
    systemInfo.packages = processorStats.packages
    systemInfo.logicalProcessors = processorStats.logical
    systemInfo.cores = processorStats.cores
    finishedResponse()
  })
  module.exports._getMemoryStats(function getMemCB(memory) {
    systemInfo.memory = memory
    finishedResponse()
  })
  getKernelVersion(function getVersionCB(kernelVersion) {
    systemInfo.kernelVersion = kernelVersion
    finishedResponse()
  })
  module.exports._getDockerContainerId(agent, function getContainerId(containerId) {
    if (containerId) {
      systemInfo.docker = {
        id: containerId
      }
    }
    finishedResponse()
  })
  fetchAWSInfo(agent, function getAWSInfo(aws) {
    systemInfo.aws = aws
    finishedResponse()
  })
}
// placed on module for mocking purposes in tests
//
// Resolve logical CPU, core, and package counts for the current platform.
// Fields that cannot be determined are reported as null.
module.exports._getProcessorStats = function getProcessorStats(callback) {
  var processorStats = {
    logical: null,
    cores: null,
    packages: null
  }

  if (platform.match(/darwin/i)) {
    // Darwin: each stat has a sysctl fallback chain; the lookups are nested
    // so all three raw values are in scope for the final sanity pass.
    getSysctlValue(['hw.packages'], function getPackages(packages) {
      getSysctlValue(['hw.physicalcpu_max', 'hw.physicalcpu'],
        function getCores(cores) {
          getSysctlValue(['hw.logicalcpu_max', 'hw.logicalcpu', 'hw.ncpu'],
            function getLogicalCpu(logical) {
              processorStats.logical = parseFloat(logical, 10)
              processorStats.cores = parseFloat(cores, 10)
              processorStats.packages = parseFloat(packages, 10)

              // Null out anything that didn't parse to a positive integer.
              for (var key in processorStats) {
                if (!processorStats[key] || !isInteger(processorStats[key])) {
                  processorStats[key] = null
                }
              }

              callback(processorStats)
            })
        })
    })
  } else if (platform.match(/bsd/i)) {
    // BSD exposes only the logical count; cores/packages remain null.
    getSysctlValue(['hw.ncpu'], function getLogicalCpu(logical) {
      processorStats.logical = logical
      callback(processorStats)
    })
  } else if (platform.match(/linux/i)) {
    readProc('/proc/cpuinfo', function parseProc(data) {
      callback(parseCpuInfo(data))
    })
  } else {
    logger.debug('Unknown platform: ' + platform + ', could not retrieve processor info')
    callback(processorStats)
  }
}
// placed on module for mocking purposes in tests
//
// Resolve total physical memory in MiB for the current platform, or null
// when the platform is unrecognized.
module.exports._getMemoryStats = function getMemoryStats(callback) {
  // Darwin and BSD both report raw bytes via sysctl; convert to MiB.
  function bytesToMib(raw) {
    callback(parseInt(raw, 10) / (1024 * 1024))
  }

  if (/darwin/i.test(platform)) {
    getSysctlValue(['hw.memsize'], bytesToMib)
  } else if (/bsd/i.test(platform)) {
    getSysctlValue(['hw.realmem'], bytesToMib)
  } else if (/linux/i.test(platform)) {
    readProc('/proc/meminfo', function parseProc(data) {
      callback(parseMemInfo(data))
    })
  } else {
    logger.debug('Unknown platform: ' + platform + ', could not retrieve memory info')
    callback(null)
  }
}
// Look up the OS kernel version string; calls back with null when the
// platform is unsupported.
function getKernelVersion(callback) {
  // Darwin and the BSDs share the same sysctl name, so the two formerly
  // duplicated branches are collapsed into one.
  if (platform.match(/darwin/i) || platform.match(/bsd/i)) {
    getSysctlValue(['kern.version'], function getVersion(version) {
      callback(version)
    })
  } else if (platform.match(/linux/i)) {
    readProc('/proc/version', function parseProc(data) {
      callback(data)
    })
  } else {
    // Fix: the message previously read 'Unknown platform<name>' with no
    // separator; now matches the ': ' format used by the sibling lookups.
    logger.debug('Unknown platform: ' + platform + ', could not read kernel version')
    callback(null)
  }
}
// Resolve the Docker container id (linux only) from the process's cgroup
// file; calls back with null on non-linux platforms or unreadable cgroups.
module.exports._getDockerContainerId = function getDockerContainerId(agent, callback) {
  if (platform.match(/linux/i)) {
    readProc('/proc/self/cgroup', function getCGroup(data) {
      if (data) {
        callback(parseDockerInfo(agent, data))
      } else {
        callback(null)
      }
    })
  } else {
    logger.debug('Platform is not a flavor of linux, omitting docker info')
    callback(null)
  }
}
// Run `sysctl -n <name>` for every candidate name in parallel and call back
// with the first clean stdout. The `returned` flag guarantees at most one
// "result" callback; if every candidate responds without a usable value,
// a final callback(null) fires after the last one.
function getSysctlValue(names, callback) {
  if (!names) return callback(null)

  var returned = false
  var ran = 0
  names.forEach(function sysctlName(name) {
    exec('sysctl -n ' + name, respond)

    function respond(err, stdout, stderr) {
      if (returned) return
      if (err) {
        // NOTE(review): an exec error on ANY candidate immediately yields
        // callback(null), even if a later candidate might have succeeded —
        // presumably intentional (e.g. sysctl binary missing entirely);
        // confirm before changing.
        logger.debug('Error when trying to run: sysctl -n ' + name + ': %s', err.message)
        callback(null)
        returned = true
      } else if (!stderr) {
        callback(stdout)
        returned = true
      }
      // Responses that produced stderr fall through to here; once every
      // candidate has run with no winner, report the miss once.
      if (++ran === names.length && !returned) {
        logger.debug('No sysctl info found for names: ' + names.toString())
        callback(null)
      }
    }
  })
}
// Read a file (typically under /proc) and hand its contents to `callback`
// as a string; calls back with null when the read fails.
function readProc(path, callback) {
  fs.readFile(path, function readProcFile(err, data) {
    if (!err) return callback(data.toString())
    logger.error('Error when trying to read ' + path, err)
    callback(null)
  })
}
|
'use strict'

/**
 * Explicit enumeration of the states a transaction can be in:
 *
 * PENDING upon instantiation (implicitly, no start time set)
 * RUNNING while timer is running (implicitly, start time is set but no stop
 *         time is set).
 * STOPPED timer has been completed (implicitly, start time and stop time
 *         are set, but the timer has not yet been harvested).
 * DEAD    timer has been harvested and can only have its duration read.
 */
var PENDING = 1
var RUNNING = 2
var STOPPED = 3

function hrToMillis(hr) {
  // process.hrTime gives you [second, nanosecond] duration pairs
  return (hr[0] * 1e3) + (hr[1] / 1e6)
}

/**
 * A mildly tricky timer that tracks its own state and allows its duration
 * to be set manually.
 */
function Timer() {
  this.state = PENDING
  this.touched = false
  this.duration = null
  this.hrDuration = null
  this.hrstart = null
  this.durationInMillis = null
}

/**
 * Start measuring time elapsed.
 *
 * Uses process.hrtime if available, Date.now() otherwise.
 */
Timer.prototype.begin = function begin() {
  if (this.state > PENDING) return

  this.start = Date.now()
  // need to put a guard on this for compatibility with Node < 0.8
  if (process.hrtime) this.hrstart = process.hrtime()
  this.state = RUNNING
}

/**
 * End measurement.
 */
Timer.prototype.end = function end() {
  if (this.state > RUNNING) return
  if (this.state === PENDING) this.begin()
  if (process.hrtime) this.hrDuration = process.hrtime(this.hrstart)
  this.touched = true
  this.duration = Date.now() - this.start
  this.state = STOPPED
}

/**
 * Update the duration of the timer without ending it.
 */
Timer.prototype.touch = function touch() {
  this.touched = true
  if (this.state > RUNNING) return
  if (this.state === PENDING) this.begin()

  if (process.hrtime) this.hrDuration = process.hrtime(this.hrstart)
  this.duration = Date.now() - this.start
}

/**
 * End the segment if it is still running, if touched use that time instead of
 * "now". Returns a boolean indicating whether the end time changed.
 */
Timer.prototype.softEnd = function softEnd() {
  if (this.state > RUNNING) return false
  if (this.state === PENDING) this.begin()

  this.state = STOPPED

  if (this.touched) return false
  if (process.hrtime) this.hrDuration = process.hrtime(this.hrstart)
  this.touched = true
  this.duration = Date.now() - this.start
  return true
}

/**
 * @return {bool} Is this timer currently running?
 */
Timer.prototype.isRunning = function isRunning() {
  return this.state === RUNNING
}

/**
 * @return {bool} Is this timer still alive?
 */
Timer.prototype.isActive = function isActive() {
  return this.state < STOPPED
}

/**
 * @return {bool} Has the timer been touched or ended?
 */
Timer.prototype.hasEnd = function hasEnd() {
  return !!this.hrDuration
}

/*
 * Sets duration and stops the timer, since the passed-in duration will take
 * precedence over the measured duration.
 *
 * @param {number} duration The duration the timer should report.
 */
Timer.prototype.overwriteDurationInMillis = overwriteDurationInMillis
function overwriteDurationInMillis(duration) {
  this.touched = true
  this.durationInMillis = duration
  this.state = STOPPED
}

/**
 * When testing, it's convenient to be able to control time. Stops the timer
 * as a byproduct.
 *
 * @param {number} duration How long the timer ran.
 * @param {number} start When the timer started running (optional).
 */
Timer.prototype.setDurationInMillis = function setDurationInMillis(duration, start) {
  if (this.state > RUNNING) return
  // Only auto-begin when no explicit start was supplied (0 counts as
  // a supplied start time).
  if (this.state === PENDING) if (!start && start !== 0) this.begin()

  this.state = STOPPED
  this.durationInMillis = duration

  // this assignment is incorrect, process.hrtime doesn't time from epoch, which
  // is the assumption being made here. since hrstart isn't used
  // anywhere except to calculate duration, and we are setting duration
  // this is fine.
  this.hrstart = [Math.floor(start / 1e3), start % 1e3 * 1e6]
  this.start = start
}

/**
 * Returns how long the timer has been running (if it's still running) or
 * how long it ran (if it's been ended or touched).
 */
Timer.prototype.getDurationInMillis = function getDurationInMillis() {
  if (this.state === PENDING) return 0

  // only set by setDurationInMillis
  if (this.durationInMillis !== null && this.durationInMillis >= 0) {
    return this.durationInMillis
  }

  // prioritize .end() and .touch()
  if (this.hrDuration) {
    return hrToMillis(this.hrDuration)
  }

  if (this.duration) {
    return this.duration
  }

  if (process.hrtime) {
    return hrToMillis(process.hrtime(this.hrstart))
  }

  return Date.now() - this.start
}

/**
 * Get a single object containing the interval this timer was active.
 *
 * @return {Array} 2-tuple of start time in milliseconds, end time in
 *                 milliseconds.
 */
Timer.prototype.toRange = function toRange() {
  return [this.start, this.start + this.getDurationInMillis()]
}

/**
 * Abstract away the nonsense related to having both an
 * hrtime start time and a regular one, and always return
 * milliseconds since start.
 *
 * @param {Timer} other The point relative to which this timer started.
 * @return {number} The offset in (floating-point) milliseconds.
 */
Timer.prototype.startedRelativeTo = function startedRelativeTo(other) {
  if (this.hrstart && other.hrstart && process.hrtime) {
    var s = this.hrstart[0] - other.hrstart[0]
    var ns = this.hrstart[1] - other.hrstart[1]
    return hrToMillis([s, ns])
  }
  return this.start - other.start
}

/**
 * Returns true if this timer ends after the other.
 */
Timer.prototype.endsAfter = function compare(other) {
  return (this.getDurationInMillis() + this.start) >
    (other.getDurationInMillis() + other.start)
}

module.exports = Timer
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 | 1 1 1 1 1 1 1 1 1 1 1 100 100 12 12 12 1 1 100 100 100 1 | 'use strict'
var path = require('path')
var logger = require('./logger')
var NAMES = require('./metrics/names')
var INSTRUMENTATIONS = require('./instrumentations')()
// Public surface: check() scans require.cache, createMetrics() reports what
// check() found to the metrics aggregator.
module.exports = {
  check: check,
  createMetrics: createMetrics
}

// Special case since we do some hackish stuff in lib/shimmer.js to make pg.js
// work
INSTRUMENTATIONS.push('pg.js')

// Static variable holding list of un-instrumented modules for use in the future
// (populated by check(), read by logUninstrumented() and createMetrics()).
var uninstrumented = []
// Log a helpful message about un-instrumented modules
//
// Emits a single warn-level message enumerating every module check() found
// loaded before newrelic; silent when the list is empty.
function logUninstrumented() {
  // Fix: the source contained `Iif`, an Istanbul coverage-report artifact,
  // not valid JavaScript — restored to a plain `if`.
  if (uninstrumented.length > 0) {
    var message =
      'The newrelic module must be the first module required.\n' +
      'The following modules were required before newrelic and are not being ' +
      'instrumented:'

    uninstrumented.forEach(function buildMessage(module) {
      message += '\n\t' + module.name + ': ' + module.filename
    })

    logger.warn(message)
  }
}
// Create Supportability/Uninstrumented/<module> metrics
//
// @param metrics Agent metrics aggregator
function createMetrics(metrics) {
  var base = NAMES.SUPPORTABILITY.UNINSTRUMENTED

  // Roll-up count plus one call-count metric per un-instrumented module.
  if (uninstrumented.length > 0) {
    metrics.getOrCreateMetric(base).incrementCallCount()
  }
  uninstrumented.forEach(function addMetrics(module) {
    metrics.getOrCreateMetric(base + '/' + module.name).incrementCallCount()
  })
}
// Determine module name from filename of module's main script
//
// Heuristic: take the first path name that isn't 'index.js' or 'lib'.
//
// @param filename Filename of module's main script
// @return Name of module
function moduleNameFromFilename(filename) {
  var name = path.basename(filename, '.js')
  // Non-index scripts are simply named after the file itself.
  if (name !== 'index') return name

  // index.js: walk the directory names from the deepest up, skipping 'lib'.
  // Fix: the source contained `Eif`, an Istanbul coverage-report artifact,
  // not valid JavaScript — restored to a plain `if`.
  var paths = filename.split(path.sep).slice(0, -1)
  for (var i = paths.length - 1; i >= 0; i--) {
    if (paths[i] !== 'lib') return paths[i]
  }
}
// Check for any instrument-able modules that have already been loaded. This does
// not check core modules as we don't have access to the core module loader
// cache. But, users probably are missing instrumentation for other modules if
// they are missing instrumentation for core modules.
function check() {
  // Fix: the source contained `Iif` (Istanbul coverage-report artifacts),
  // not valid JavaScript — both restored to plain `if`.
  for (var filename in require.cache) {
    if (!require.cache.hasOwnProperty(filename)) {
      continue
    }

    var name = moduleNameFromFilename(filename)

    if (INSTRUMENTATIONS.indexOf(name) !== -1) {
      uninstrumented.push({name: name, filename: filename})
    }
  }

  logUninstrumented()
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| api.js | 15.93% | (29 / 182) | 0% | (0 / 90) | 0% | (0 / 29) | 18.13% | (29 / 160) | |
| facts.js | 23.53% | (4 / 17) | 0% | (0 / 12) | 0% | (0 / 2) | 23.53% | (4 / 17) | |
| http-agents.js | 25% | (6 / 24) | 0% | (0 / 16) | 0% | (0 / 2) | 25% | (6 / 24) | |
| parse-response.js | 9.62% | (5 / 52) | 0% | (0 / 36) | 0% | (0 / 4) | 10.87% | (5 / 46) | |
| remote-method.js | 25.58% | (33 / 129) | 0% | (0 / 63) | 0% | (0 / 18) | 27.73% | (33 / 119) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../logger').child({component: 'collector_api'})
var facts = require('./facts.js')
var RemoteMethod = require('./remote-method.js')
/*
 *
 * CONSTANTS
 *
 */

// just to make clear what's going on
var TO_MILLIS = 1e3

// taken directly from Python agent's newrelic.core.application
// Retry schedule for connect(): `interval` is in seconds; `warn: true`
// marks the attempt that triggers a user-facing warning.
var BACKOFFS = [
  {interval: 15, warn: false},
  {interval: 15, warn: false},
  {interval: 30, warn: false},
  {interval: 60, warn: true},
  {interval: 120, warn: false},
  {interval: 300, warn: false}
]

// Exception class names the collector returns in error payloads; matched
// against `error.class` in connect() and _runLifecycle().
var ERRORS = {
  INVALID_LICENSE: 'NewRelic::Agent::LicenseException',
  LIMIT_EXCEEDED: 'NewRelic::Agent::InternalLimitExceeded',
  RESTART: 'NewRelic::Agent::ForceRestartException',
  DISCONNECT: 'NewRelic::Agent::ForceDisconnectException',
  MAINTENANCE: 'NewRelic::Agent::MaintenanceError',
  RUNTIME: 'RuntimeError'
}

// HTTP status codes with dedicated handling in _runLifecycle().
var HTTP_REQUEST_TOO_LARGE = 413
var HTTP_UNSUPPORTED_MEDIA_TYPE = 415
var HTTP_SERVER_INTERNAL = 500
var HTTP_LOL_COLLECTOR = 503
// Trace-log every error gathered during `name`, including any chained
// `laterErrors`, numbering them sequentially across the whole list.
function dumpErrors(errors, name) {
  var index = 1

  errors.forEach(function cb_forEach(error) {
    logger.trace(error, "Error %s during %s:", index++, name)
    if (!error.laterErrors) return
    error.laterErrors.forEach(function cb_forEach(laterError) {
      logger.trace(laterError, "Error %s during %s:", index++, name)
    })
  })
}
/**
 * Thin wrapper around the collector's RPC surface. One RemoteMethod is
 * pre-built per endpoint at construction time and reused for the life of
 * the agent.
 *
 * @param agent The agent this API instance reports for; its config is
 *              shared with every RemoteMethod.
 */
function CollectorAPI(agent) {
  this._agent = agent

  /* RemoteMethods can be reused and have little per-object state, so why not
   * save some GC time?
   */
  this._methods = {
    redirect: new RemoteMethod('get_redirect_host', agent.config),
    handshake: new RemoteMethod('connect', agent.config),
    settings: new RemoteMethod('agent_settings', agent.config),
    errors: new RemoteMethod('error_data', agent.config),
    metrics: new RemoteMethod('metric_data', agent.config),
    traces: new RemoteMethod('transaction_sample_data', agent.config),
    shutdown: new RemoteMethod('shutdown', agent.config),
    events: new RemoteMethod('analytic_event_data', agent.config),
    customEvents: new RemoteMethod('custom_event_data', agent.config),
    queryData: new RemoteMethod('sql_trace_data', agent.config),
    errorEvents: new RemoteMethod('error_event_data', agent.config)
  }
}
/**
 * Establish a connection to the collector, retrying failed logins on the
 * BACKOFFS schedule. `callback(error, response, body)` fires on the first
 * success or after the final attempt fails.
 *
 * @param {Function} callback The continuation / error handler (required).
 */
CollectorAPI.prototype.connect = function connect(callback) {
  if (!callback) throw new TypeError("callback is required")

  var api = this
  var attempts = 1
  var max = BACKOFFS.length
  var errors = []

  function retry(error, response, body) {
    if (error) errors.push(error)
    // Success, or retries exhausted: dump accumulated errors and finish
    // (on exhaustion `error` is the last failure seen).
    if (!error || attempts >= max) {
      dumpErrors(errors, 'connect')
      return callback(error, response, body)
    }

    // failing high-security mode compliance will cause a disconnect
    if (error.class === ERRORS.DISCONNECT) {
      logger.error("The New Relic collector rejected this agent.")
      logger.error(error.message)
    }

    var backoff = BACKOFFS[attempts - 1]
    if (backoff.warn) {
      logger.warn(
        "No connection has been established to New Relic after %s attempts.",
        attempts
      )
    }

    logger.debug(
      "Failed attempting to connect to New Relic, waiting %ss to retry.",
      backoff.interval
    )

    attempts++

    var id = setTimeout(function again() {
      api._login(retry)
    }, backoff.interval * TO_MILLIS)
    // Don't let a pending retry timer keep the process alive.
    if (id.unref) {
      id.unref()
    }
  }

  this._login(retry)
}
/**
 * Two-phase login: ask get_redirect_host for this account's collector,
 * repoint config.host/port at it, then invoke `connect` with the gathered
 * environment facts. On success the agent is reconfigured with server-side
 * settings before `callback(null, config, body)` fires.
 */
CollectorAPI.prototype._login = function _login(callback) {
  var methods = this._methods
  var agent = this._agent

  methods.redirect.invoke(null, function cb_invoke(error, collector, body) {
    if (error) return callback(error, collector, body)
    if (!collector) {
      // No redirect host: keep talking to the default host.
      logger.error(
        "Requesting this account's collector from %s failed; trying default.",
        agent.config.host
      )
    } else {
      // Redirect host may carry an explicit port ("host:port"); anything
      // with more than one colon is rejected as malformed.
      var parts = collector.split(':')
      if (parts.length > 2) {
        logger.error(
          "Requesting collector from %s returned bogus result '%s'; trying default.",
          agent.config.host,
          collector
        )
      } else {
        logger.debug(
          "Requesting this account's collector from %s returned %s; reconfiguring.",
          agent.config.host,
          collector
        )
        agent.config.host = parts[0]
        if (parts.length > 1) {
          agent.config.port = parts[1]
        }
      }
    }

    facts(agent, function getEnvDict(environmentDict) {
      // The collector really likes arrays.
      // In fact, it kind of insists on them.
      var environment = [environmentDict]

      methods.handshake.invoke(environment, function cb_invoke(error, config, body) {
        if (error) return callback(error, config, body)
        if (!config || !config.agent_run_id) {
          return callback(new Error("No agent run ID received from handshake."), config)
        }

        agent.setState('connected')
        logger.info(
          "Connected to %s:%d with agent run ID %s.",
          agent.config.host,
          agent.config.port,
          config.agent_run_id
        )

        // pass configuration data from the API so automatic reconnect works
        agent.reconfigure(config)

        callback(null, config, body)
      })
    })
  })
}
/**
 * Send current public agent settings to collector. This should always be
 * invoked after a successful connect response with server-side settings, but
 * will also be invoked on any other config changes.
 *
 * @param {Function} callback The continuation / error handler (optional).
 */
CollectorAPI.prototype.reportSettings = function reportSettings(callback) {
  var payload = [this._agent.config.publicSettings()]

  // The second argument to the callback is always empty data
  this._methods.settings.invoke(payload, function cb_invoke(error, unused, body) {
    if (error) dumpErrors([error], 'agent_settings')
    if (callback) callback(error, body)
  })
}
/**
 * Submit pre-encoded error trace data via the error_data endpoint. For
 * performance reasons no validation is performed here; the collector
 * expects a JSON array of exactly:
 *
 *   1. The agent run ID.
 *   2. An array of one or more errors (see lib/error.js for details).
 *
 * @param {Array} errors The encoded errors list.
 * @param {Function} callback The continuation / error handler.
 */
CollectorAPI.prototype.errorData = function errorData(errors, callback) {
  if (!errors) {
    throw new TypeError("must pass errors to send")
  }
  if (!callback) {
    throw new TypeError("callback is required")
  }

  this._runLifecycle(this._methods.errors, errors, callback)
}
/**
 * Submit pre-encoded metric data via the metric_data endpoint. For
 * performance reasons no validation is performed here; the collector
 * expects a JSON array of exactly:
 *
 *   1. The agent run ID.
 *   2. Collection-start time, in seconds since the epoch.
 *   3. Collection-end time, in seconds since the epoch.
 *   4. An array of 1 or more metric arrays (see lib/metrics.js for details).
 *
 * @param {Array} metrics The encoded metrics list.
 * @param {Function} callback The continuation / error handler.
 */
CollectorAPI.prototype.metricData = function metricData(metrics, callback) {
  if (!metrics) {
    throw new TypeError("must pass metrics to send")
  }
  if (!callback) {
    throw new TypeError("callback is required")
  }

  this._runLifecycle(this._methods.metrics, metrics, callback)
}
/**
 * Submit pre-encoded transaction events via analytic_event_data.
 *
 * @param {Array} events The encoded events list.
 * @param {Function} callback The continuation / error handler.
 */
CollectorAPI.prototype.analyticsEvents = function analyticsEvents(events, callback) {
  if (!events) {
    throw new TypeError("must pass events to send")
  }
  if (!callback) {
    throw new TypeError("callback is required")
  }

  this._runLifecycle(this._methods.events, events, callback)
}
/**
 * Submit pre-encoded custom events via custom_event_data.
 *
 * @param {Array} events The encoded events list.
 * @param {Function} callback The continuation / error handler.
 */
CollectorAPI.prototype.customEvents = function customEvents(events, callback) {
  if (!events) {
    throw new TypeError("must pass events to send")
  }
  if (!callback) {
    throw new TypeError("callback is required")
  }

  this._runLifecycle(this._methods.customEvents, events, callback)
}
/**
 * Submit pre-encoded slow SQL data via sql_trace_data. For performance
 * reasons no validation is performed here; the collector expects a JSON
 * array of exactly:
 *
 *   1. The agent run ID.
 *   2. The encoded slow SQL data.
 *
 * @param {Array} queries The encoded slow SQL data.
 * @param {Function} callback The continuation / error handler.
 */
CollectorAPI.prototype.queryData = function queryData(queries, callback) {
  if (!queries) {
    throw new TypeError("must pass queries to send")
  }
  if (!callback) {
    throw new TypeError("callback is required")
  }

  this._runLifecycle(this._methods.queryData, queries, callback)
}
/**
 * Submit pre-encoded error events via error_event_data.
 *
 * @param {Array} events The encoded error events list.
 * @param {Function} callback The continuation / error handler.
 */
CollectorAPI.prototype.errorEvents = function errorEvents(events, callback) {
  // Fix: the guard message previously read "must pass queries to send" —
  // copy-pasted from queryData(); it now names the actual parameter.
  if (!events) throw new TypeError("must pass events to send")
  if (!callback) throw new TypeError("callback is required")

  this._runLifecycle(this._methods.errorEvents, events, callback)
}
/**
 * Submit pre-encoded slow transaction traces via transaction_sample_data.
 * For performance reasons no validation is performed here; the collector
 * expects a JSON array of exactly:
 *
 *   1. The agent run ID.
 *   2. The encoded slow trace data — the most complicated format handled by
 *      this module; see lib/transaction/trace.js for details.
 *
 * @param {Array} trace The encoded trace data.
 * @param {Function} callback The continuation / error handler.
 */
CollectorAPI.prototype.transactionSampleData =
  function transactionSampleData(trace, callback) {
    if (!trace) {
      throw new TypeError("must pass slow trace data to send")
    }
    if (!callback) {
      throw new TypeError("callback is required")
    }

    this._runLifecycle(this._methods.traces, trace, callback)
  }
/**
 * Sends no data aside from the message itself. Clears the run ID, which
 * effectively disconnects the agent from the collector.
 *
 * @param Function callback Runs after the run ID has been cleared.
 */
CollectorAPI.prototype.shutdown = function shutdown(callback) {
  if (!callback) throw new TypeError("callback is required")

  var agent = this._agent
  this._methods.shutdown.invoke(null, function closed(error, returned, body) {
    if (!error) {
      agent.setState('disconnected')
      logger.info(
        "Disconnected from New Relic; clearing run ID %s.",
        agent.config.run_id
      )
      agent.config.run_id = undefined
    } else {
      dumpErrors([error], 'shutdown')
    }

    callback(error, returned, body)
  })
}
// Full reconnect cycle: shut down the current session, then log in again.
CollectorAPI.prototype._restart = function _restart(callback) {
  var self = this
  this.shutdown(function reconnect() {
    self.connect(callback)
  })
}
/**
 * Invoke `method` with `body`, routing collector errors through the
 * standard handling policy: permanently-rejected payloads (413/415,
 * LimitExceeded) are discarded by reporting success; collector trouble
 * (503/500, Maintenance, Runtime) errors back so data is retained for
 * resubmission; ForceRestart reconnects and re-invokes the same call;
 * ForceDisconnect stops the agent entirely.
 *
 * @param {RemoteMethod} method   Pre-built endpoint wrapper to invoke.
 * @param {*}            body     Already-encoded payload.
 * @param {Function}     callback Receives (error, returned, json).
 */
CollectorAPI.prototype._runLifecycle = function _runLifecycle(method, body, callback) {
  if (!this.isConnected()) {
    logger.warn("Not connected to New Relic. Not calling.", method.name)
    return callback(new Error("Not connected to collector.", null, null))
  }

  var api = this
  function standardHandler(error, returned, json) {
    if (!error) return callback(error, returned, json)

    dumpErrors([error], method.name)

    if (error.statusCode === HTTP_REQUEST_TOO_LARGE) {
      logger.error(
        error,
        "This call of %s sent New Relic too much data; discarding (%s):",
        method.name,
        HTTP_REQUEST_TOO_LARGE
      )
      // null error: callers treat the payload as delivered so it is dropped.
      return callback(null, returned, json)
    } else if (error.statusCode === HTTP_UNSUPPORTED_MEDIA_TYPE) {
      logger.error(
        error,
        "The New Relic collector couldn't deserialize data; discarding for %s (%s):",
        method.name,
        HTTP_UNSUPPORTED_MEDIA_TYPE
      )
      return callback(null, returned, json)
    } else if (error.statusCode === HTTP_LOL_COLLECTOR) {
      logger.debug(
        error,
        "New Relic is experiencing a spot of bother; please hold on (%s):",
        HTTP_LOL_COLLECTOR
      )
      return callback(error, returned, json)
    } else if (error.statusCode === HTTP_SERVER_INTERNAL) {
      logger.error(
        error,
        "New Relic's servers encountered a severe internal error on %s (%s):",
        method.name,
        HTTP_SERVER_INTERNAL
      )
      return callback(error, returned, json)
    } else if (error.class === ERRORS.INVALID_LICENSE) {
      logger.error(
        error,
        "Your New Relic license key appears to be invalid. Please double-check it:"
      )
      return callback(error, returned, json)
    } else if (error.class === ERRORS.LIMIT_EXCEEDED) {
      logger.error(
        error,
        "New Relic ran into a weird problem with %s. Let support@newrelic.com know:",
        method.name
      )
      return callback(null, returned, json)
    } else if (error.class === ERRORS.RESTART) {
      logger.info(
        error,
        "The New Relic collector requested a connection restart on %s:",
        method.name
      )
      // Reconnect, then replay this same invocation through the handler.
      return api._restart(function cb__restart() {
        method.invoke(body, standardHandler)
      })
    } else if (error.class === ERRORS.DISCONNECT) {
      logger.error(error, "The New Relic collector is shutting down this agent:")
      return api._agent.stop(function cb_stop() {
        callback(error, returned, json)
      })
    } else if (error.class === ERRORS.MAINTENANCE) {
      logger.info(
        error,
        "The New Relic server for your account is currently undergoing maintenance. " +
          "Data will be held until it can be submitted (failed on %s):",
        method.name
      )
      return callback(error, returned, json)
    } else if (error.class === ERRORS.RUNTIME) {
      logger.warn(
        error,
        "Calling %s on New Relic failed due to a runtime error. " +
          "Data will be held until it can be submitted:",
        method.name
      )
      return callback(error, returned, json)
    }

    // Unrecognized failure: error back so data is retained.
    logger.error(
      error,
      "Calling %s on New Relic failed unexpectedly. " +
        "Data will be held until it can be submitted:",
      method.name
    )
    return callback(error, returned, json)
  }

  method.invoke(body, standardHandler)
}
/**
 * @return {bool} Whether the agent currently holds a server-assigned run ID.
 */
CollectorAPI.prototype.isConnected = function isConnected() {
  return Boolean(this._agent.config.run_id)
}

module.exports = CollectorAPI
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 | 1 1 1 1 | 'use strict'
var fetchSystemInfo = require('../system-info')
var parse_labels = require('../util/label-parser')
module.exports = facts
// Assemble the collector `connect` payload: host identity, utilization
// data, public agent settings, and labels. Asynchronous because system info
// is gathered via shell/proc lookups; calls back with the completed object.
//
// @param agent    Agent whose config and environment are reported.
// @param callback Receives the facts object (no error argument).
function facts(agent, callback) {
  fetchSystemInfo(agent, function cb_fetchSystemInfo(systemInfo) {
    var hostname = agent.config.getHostnameSafe()
    var results = {
      utilization: {
        metadata_version: 2,
        logical_processors: systemInfo.logicalProcessors,
        total_ram_mib: systemInfo.memory,
        hostname: hostname
      },
      pid: process.pid,
      host: hostname,
      display_host: agent.config.getDisplayHost() || hostname,
      language: 'nodejs',
      app_name: agent.config.applications(),
      agent_version: agent.version,
      environment: agent.environment,
      settings: agent.config.publicSettings(),
      high_security: agent.config.high_security,
      labels: parse_labels(agent.config.labels)
    }

    // TODO: After reconfiguring agent startup to wait for the server to start
    // or for the first transaction, add the `port` for the server too.
    // NOTE: The concat is necessary to prevent sort from happening in-place.
    results.identifier = [
      'nodejs',
      results.host,
      results.app_name.concat([]).sort().join(',')
    ].join(':')

    // Vendor metadata is only attached when at least one vendor reported.
    if (systemInfo.aws || systemInfo.docker) {
      results.utilization.vendors = {}
      if (systemInfo.aws) {
        results.utilization.vendors.aws = systemInfo.aws
      }
      if (systemInfo.docker) {
        results.utilization.vendors.docker = systemInfo.docker
      }
    }

    // User-supplied utilization overrides validated in fetchSystemInfo.
    if (systemInfo.config) {
      results.utilization.config = systemInfo.config
    }

    return callback(results)
  })
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 | 1 1 1 1 1 1 | 'use strict'
var parse = require('url').parse
var ProxyAgent = require('https-proxy-agent')
var logger = require('../logger').child({component: 'http-agent'})
var certificates = require('./ssl/certificates.js')
// Construct an HTTPS-tunneling agent for talking to the collector through
// the configured proxy.
//
// @param config Agent config; `ssl`, the `proxy*` settings, and
//               `certificates` are read (via proxyOptions).
// @return {ProxyAgent} suitable for the `agent` option of http(s) requests.
exports.proxyAgent = function proxyAgent(config) {
  var opts = proxyOptions(config)
  var proxy_url = opts.proxy_url

  var proxy_opts = {
    host: proxy_url.host,
    port: proxy_url.port,
    protocol: proxy_url.protocol,
    // Tunnel TLS to the collector only when the agent is configured for SSL.
    secureEndpoint: config.ssl,
    auth: proxy_url.auth,
    ca: opts.certificates
  }

  // Log credentials presence, never their value.
  logger.info({
    host: proxy_opts.host,
    port: proxy_opts.port,
    auth: !!proxy_opts.auth,
    protocol: proxy_url.protocol
  }, 'using proxy')

  var proxy = new ProxyAgent(proxy_opts)

  return proxy
}
// Build the proxy connection descriptor used by proxyAgent().
//
// Returns `{proxy_url, certificates?}` where `proxy_url` carries `protocol`,
// `host`, `port`, and `auth`. An explicit `config.proxy` URL wins; otherwise
// the individual proxy_* settings are used with HTTP defaults.
//
// @param config Agent config.
// @return Options object with a normalized proxy_url and optional CA list.
function proxyOptions(config) {
  // Fix: proxy_url was previously `var`-declared inside the `if` branch and
  // relied on hoisting to be assigned in the `else` and read afterward;
  // declare it up front so the data flow is explicit.
  var proxy_url

  if (config.proxy) {
    var parsed_url = parse(config.proxy)

    proxy_url = {
      protocol: parsed_url.protocol || 'http:',
      host: parsed_url.hostname,
      port: parsed_url.port || 80,
      auth: parsed_url.auth
    }
  } else {
    var proxy_auth = config.proxy_user
    // NOTE(review): when proxy_pass is set but proxy_user is undefined this
    // yields "undefined:<pass>" — behavior preserved as-is; presumably the
    // config layer defaults proxy_user/proxy_pass to strings. Confirm.
    if (config.proxy_pass !== '') {
      proxy_auth += ':' + config.proxy_pass
    }

    // Unless a proxy config is provided, default to HTTP.
    proxy_url = {
      protocol: 'http:',
      host: config.proxy_host || 'localhost',
      port: config.proxy_port || 80,
      auth: proxy_auth
    }
  }

  var opts = {
    proxy_url: proxy_url
  }

  // merge user certificates with built-in certs
  if (config.certificates && config.certificates.length > 0) {
    logger.info(
      'Using a proxy with a special cert. This enables our cert bundle which, combined ' +
      'with some versions of node, exacerbates a leak in node core TLS.'
    )
    opts.certificates = config.certificates.concat(certificates)
  }

  return opts
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 | 1 1 1 1 1 | 'use strict'
var format = require('util').format
var logger = require('../logger').child({component: 'new_relic_response'})
/*
*
* CONSTANTS
*
*/
var RESPONSE_VALUE_NAME = 'return_value'
var EXCEPTION_VALUE_NAME = 'exception'
/**
* The collector has many ways of indicating failure, and isn't
* necessarily consistent. Because there can either be a failure at
* the network level, a nonstandard HTTP status code on the response,
* or a JSON-encoded exception in the response body, there's a lot of
* conditional logic in here that tries to grab as much information
* about errors as possible, and to parse out the return value as often
* as possible.
*
* @param string name Remote method name that was invoked.
* @param ServerResponse response HTTP response stream
* @param Function callback Function that will be called with any
* error, the value returned by the server
* (if any), and the raw JSON of the
* server's response.
*
* @returns Function Another callback that is meant to be invoked with
* any errors from reading the response stream, as
* well as a string containing the full response.
*/
module.exports = function parse(name, response, callback) {
if (!name) throw new TypeError('collector method name required!')
if (!response) throw new TypeError('HTTP response required!')
if (!callback) throw new TypeError('callback required!')
return function parser(inError, body) {
/* jshint maxdepth:4 */
var code = response.statusCode
var errors = []
var errorClass
var json
var returned
if (code !== 200) logger.debug("Got %s as a response code from the collector.", code)
if (inError) errors.push(inError)
if (body) {
try {
json = JSON.parse(body)
// Can be super verbose, but useful for debugging.
logger.trace({response: json}, "Deserialized from collector:")
// If we get messages back from the collector, be polite and pass them along.
returned = json[RESPONSE_VALUE_NAME]
if (returned && returned.messages) {
returned.messages.forEach(function cb_forEach(element) {
logger.info(element.message)
})
}
/* Wait to deal with errors in the response until any messages have
* been passed along. Otherwise, ensure that there was a return
* value, raising an error if not.
*
* Some errors are only interesting if the status code indicates
* that the request went bad already, so filter out adding more
* errors when statusCode is not OK (200).
*/
var exception = json[EXCEPTION_VALUE_NAME]
if (exception) {
if (exception.message) {
errors.push(new Error(exception.message))
} else if (code === 200 ) {
errors.push(new Error('New Relic internal error'))
}
if (exception.error_type) errorClass = exception.error_type
} else if (code === 200 && returned === undefined) {
errors.push(new Error(format('No data found in response to %s.', name)))
}
} catch (error) {
logger.trace(error, 'Could not parse response from the collector: %s', body)
errors.push(error)
}
} else {
errors.push(new Error(format('No body found in response to %s.', name)))
}
if (code !== 200) {
errors.push(new Error(format('Got HTTP %s in response to %s.', code, name)))
}
var error
if (errors.length > 0) {
error = errors.shift()
error.statusCode = code
// Preserve a consistent hidden class (cheaper than sub-classing Error).
error.class = errorClass ? errorClass : undefined
error.laterErrors = (errors.length > 0) ? errors : undefined
}
// Raw json is useful for testing and logging.
process.nextTick(function cb_nextTick() {
callback(error, returned, json)
})
}
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var util = require('util')
var url = require('url')
var http = require('http')
var https = require('https')
var zlib = require('zlib')
var logger = require('../logger').child({component: 'remote_method_invoke'})
var parse = require('./parse-response')
var safeJSON = require('../util/safe-json')
var Sink = require('../util/stream-sink')
var agents = require('./http-agents')
var certificates = require('./ssl/certificates')
/*
*
* CONSTANTS
*
*/
var PROTOCOL_VERSION = 14
var RUN_ID_NAME = 'run_id'
var RAW_METHOD_PATH = '/agent_listener/invoke_raw_method'
// see job/collector-master/javadoc/com/nr/servlet/AgentListener.html on NR Jenkins
var USER_AGENT_FORMAT = "NewRelic-NodeAgent/%s (nodejs %s %s-%s)"
var ENCODING_HEADER = 'CONTENT-ENCODING'
var CONTENT_TYPE_HEADER = 'Content-Type'
var DEFAULT_ENCODING = 'identity'
var DEFAULT_CONTENT_TYPE = 'application/json'
var COMPRESSED_CONTENT_TYPE = 'application/octet-stream'
/**
 * A single named method on the collector's RPC-style API.
 *
 * @param {string} name Method to invoke on the collector.
 * @param {object} config Agent configuration (host, port, ssl, ...).
 * @throws {TypeError} When no method name is supplied.
 */
function RemoteMethod(name, config) {
  if (!name) throw new TypeError("Must include name of method to invoke on collector.")

  this.name = name
  this._config = config
}
/**
 * Turn the payload into a JSON string for transmission to the collector.
 *
 * On success the callback is invoked synchronously; serialization failures
 * are reported on the next tick so error delivery is always async.
 *
 * @param object payload Data to serialize.
 * @param Function callback Receives (error) or (null, serialized).
 */
RemoteMethod.prototype.serialize = function serialize(payload, callback) {
  var serialized
  try {
    serialized = safeJSON.stringifySync(payload)
  } catch (error) {
    logger.error(error, "Unable to serialize payload for method %s.", this.name)
    return process.nextTick(function cb_nextTick() {
      return callback(error)
    })
  }
  return callback(null, serialized)
}
/**
 * The primary operation on RemoteMethod objects. If you're calling anything on
 * RemoteMethod objects aside from invoke (and you're not writing test code),
 * you're doing it wrong.
 *
 * @param object payload Serializable payload.
 * @param Function callback What to do next. Gets passed any error.
 */
RemoteMethod.prototype.invoke = function call(payload, callback) {
  var self = this

  // An absent payload still produces a valid (empty) collector message.
  this.serialize(payload || [], function cb_serialize(err, serialized) {
    if (err) return callback(err)
    self._post(serialized, callback)
  })
}
/**
 * Take a serialized payload and create a response wrapper for it before
 * invoking the method on the collector.
 *
 * @param string data Serialized payload.
 * @param Function callback What to do next. Gets passed any error.
 */
RemoteMethod.prototype._post = function _post(data, callback) {
  var method = this

  // Standard response handling: log completion and pipe the body through a
  // sink into the response parser.
  function onResponse(response) {
    response.on('end', function handle_end() {
      logger.debug(
        "Finished receiving data back from the collector for %s.",
        method.name
      )
    })
    response.setEncoding('utf8')
    response.pipe(new Sink(parse(method.name, response, callback)))
  }

  var options = {
    port: this._config.port,
    host: this._config.host,
    compressed: this._shouldCompress(data),
    path: this._path(),
    onError: callback,
    onResponse: onResponse
  }

  if (!options.compressed) {
    // Small payloads go out as-is.
    logger.debug({data: data}, "Calling %s on collector API", this.name)
    options.body = data
    this._safeRequest(options)
    return
  }

  logger.trace({data: data}, "Sending %s on collector API with (COMPRESSED)", this.name)
  var compressor = this._config.compressed_content_encoding === 'gzip'
    ? zlib.gzip
    : zlib.deflate
  compressor(data, function cb_compressor(err, compressed) {
    if (err) {
      logger.warn(err, "Error compressing JSON for delivery. Not sending.")
      return callback(err)
    }

    options.body = compressed
    method._safeRequest(options)
  })
}
/**
 * http.request does its own DNS lookup, and if it fails, will cause
 * dns.lookup to throw asynchronously instead of passing the error to
 * the callback (which is obviously awesome). To prevent New Relic from
 * crashing people's applications, verify that lookup works and bail out
 * early if not.
 *
 * Also, ensure that all the necessary parameters are set before
 * actually making the request. Useful to put here to simplify test code
 * that calls _request directly.
 *
 * @param object options A dictionary of request parameters.
 * @throws {Error} When any required request parameter is missing.
 */
RemoteMethod.prototype._safeRequest = function _safeRequest(options) {
  if (!options) throw new Error("Must include options to make request!")
  if (!options.host) throw new Error("Must include collector hostname!")
  if (!options.port) throw new Error("Must include collector port!")
  if (!options.onError) throw new Error("Must include error handler!")
  if (!options.onResponse) throw new Error("Must include response handler!")
  if (!options.body) throw new Error("Must include body to send to collector!")
  if (!options.path) throw new Error("Must include URL to request!")

  var protocol = this._config.ssl ? 'https' : 'http'
  var logconfig = this._config.logging
  var audit_log = this._config.audit_log

  // Describe the payload without dumping raw buffer contents into the log.
  var logextras = {
    body: Buffer.isBuffer(options.body) ? 'Buffer ' + options.body.length : options.body
  }
  // BUG FIX: previously the extras object was passed as util.format's FIRST
  // argument, which disables placeholder substitution (Node's util.format
  // only substitutes when the first argument is a string), so the literal
  // "%s" tokens were logged. Format the message on its own and hand the
  // extras object to the logger separately (bunyan-style, as elsewhere).
  var logevent = util.format(
    'Posting to %s://%s:%s%s',
    protocol,
    options.host,
    options.port,
    options.path
  )

  // if trace level is not explicity enabled
  // check to see if the audit log is enabled
  if ((typeof logconfig !== 'undefined') && logconfig.level !== 'trace') {
    // Guard audit_log: configs without an audit section must not crash here.
    if (audit_log && audit_log.enabled &&
        // if the filter property is empty, then always log the event
        // otherwise check to see if the filter includes this method
        (audit_log.endpoints.length > 0 ?
          audit_log.endpoints.indexOf(this.name) > -1 : true)) {
      logger.info(logextras, logevent)
    }
  } else {
    logger.trace(logextras, logevent)
  }

  this._request(options)
}
/**
 * Generate the request headers and wire up the request. There are many
 * parameters used to make a request:
 *
 * @param string options.host Hostname (or proxy hostname) for collector.
 * @param string options.port Port (or proxy port) for collector.
 * @param string options.path URL path for method being invoked on collector.
 * @param string options.body Serialized payload to be sent to collector.
 * @param boolean options.compressed Whether the payload has been compressed.
 * @param Function options.onError Error handler for this request (probably the
 *                                 original callback given to .send).
 * @param Function options.onResponse Response handler for this request (created by
 *                                    ._post).
 */
RemoteMethod.prototype._request = function _request(options) {
  var requestOptions = {
    method: this._config.put_for_data_send ? 'PUT' : 'POST',
    setHost: false,         // See below
    host: options.host,     // Set explicitly in the headers
    port: options.port,
    path: options.path,
    headers: this._headers(options.body, options.compressed),
    __NR__connection: true  // Who measures the metrics measurer?
  }

  var request
  // Any proxy setting (full URL or individual host/port pieces) routes the
  // request through the proxy agent instead of connecting directly.
  var isProxy = !!(
    this._config.proxy ||
    this._config.proxy_port ||
    this._config.proxy_host
  )

  if (isProxy) {
    // proxy
    requestOptions.agent = agents.proxyAgent(this._config)
    request = https.request(requestOptions)

    // FIXME: The agent keeps this connection open when using the proxy.
    // This will prevent the application from shutting down correctly.
    // Explicitly destroy the socket when the response is completed.
    //
    // This goes against keep-alive, but for now letting the application die
    // gracefully is more important.
    request.on('response', function cb_on_response(sock) {
      sock.on('end', function cb_on_end() {
        sock.destroy()
      })
    })
  } else if (this._config.ssl) {
    if (this._config.certificates && this._config.certificates.length > 0) {
      // User-supplied certs are merged with the bundled collector certs,
      // mirroring the proxy-side handling in http-agents.
      logger.debug(
        'Adding custom certificate to the cert bundle.'
      )
      requestOptions.ca = this._config.certificates.concat(certificates)
    }
    request = https.request(requestOptions)
  } else {
    request = http.request(requestOptions)
  }

  request.on('error', options.onError)
  request.on('response', options.onResponse)

  request.end(options.body)
}
/**
 * Build the User-Agent header value sent to the collector. See the constants
 * list for the format string (and the URL that explains it).
 *
 * @returns string Formatted agent identification string.
 */
RemoteMethod.prototype._userAgent = function _userAgent() {
  var agentVersion = this._config.version
  var nodeVersion = process.versions.node
  return util.format(USER_AGENT_FORMAT, agentVersion, nodeVersion, process.platform, process.arch)
}
/**
 * Generate a URL the collector understands.
 *
 * @returns string The URL path to be POSTed to.
 */
RemoteMethod.prototype._path = function _path() {
  var query = {
    marshal_format: 'json',
    protocol_version: PROTOCOL_VERSION,
    license_key: this._config.license_key,
    method: this.name
  }

  // Only established sessions have a run ID to hand back to the collector.
  if (this._config.run_id) {
    query[RUN_ID_NAME] = this._config.run_id
  }

  return url.format({
    pathname: RAW_METHOD_PATH,
    query: query
  })
}
/**
 * Assemble the HTTP headers for a collector request.
 *
 * @param {string|Buffer} body - Payload being sent (sized for Content-Length).
 * @param {bool} compressed - Whether the payload was compressed.
 * @returns {object} Header dictionary for http(s).request.
 */
RemoteMethod.prototype._headers = function _headers(body, compressed) {
  var headers = {
    // select the virtual host on the server end
    'Host': this._config.host,
    'User-Agent': this._userAgent(),
    'Connection': 'Keep-Alive',
    'Content-Length': byteLength(body)
  }

  if (compressed) {
    headers[ENCODING_HEADER] = this._config.compressed_content_encoding
    headers[CONTENT_TYPE_HEADER] = COMPRESSED_CONTENT_TYPE
  } else {
    headers[ENCODING_HEADER] = DEFAULT_ENCODING
    headers[CONTENT_TYPE_HEADER] = DEFAULT_CONTENT_TYPE
  }

  return headers
}
/**
 * Decide whether a serialized payload is big enough to compress before
 * shipping it to the collector.
 *
 * FLN pretty much decided on his own recognizance that 64K was a good point
 * at which to compress a server response. There's only a loose consensus that
 * the threshold should probably be much higher than this, if only to keep the
 * load on the collector down.
 *
 * FIXME: come up with a better heuristic
 *
 * @param {string|Buffer} data - Serialized payload; may be null/empty.
 * @returns {boolean} True when the payload exceeds 64KiB.
 */
RemoteMethod.prototype._shouldCompress = function _shouldCompress(data) {
  // Coerce to a real boolean: the original returned the falsy `data` value
  // itself (null/''/undefined) when there was no payload. Callers only
  // truthy-test the result, so this is backward-compatible.
  return Boolean(data) && byteLength(data) > 65536
}
/**
 * Measure a payload's size in bytes.
 *
 * @param {string|Buffer|null|undefined} data - Payload to measure.
 * @returns {number} Byte count; 0 for any falsy input.
 */
function byteLength(data) {
  if (!data) {
    return 0
  }
  // Buffers already know their length; strings must be measured as UTF-8.
  return data instanceof Buffer ? data.length : Buffer.byteLength(data, 'utf8')
}
module.exports = RemoteMethod
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| certificates.js | 100% | (1 / 1) | 100% | (0 / 0) | 100% | (0 / 0) | 100% | (1 / 1) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960 961 962 963 964 965 966 967 968 969 970 971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992 993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008 1009 1010 1011 1012 1013 1014 1015 1016 1017 1018 1019 1020 
1021 1022 1023 1024 1025 1026 1027 1028 1029 1030 1031 1032 1033 1034 1035 1036 1037 1038 1039 1040 1041 1042 1043 1044 1045 1046 1047 1048 1049 1050 1051 1052 1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063 1064 1065 1066 1067 1068 1069 1070 1071 1072 1073 1074 1075 1076 1077 1078 1079 1080 1081 1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098 1099 1100 1101 1102 1103 1104 1105 1106 1107 1108 1109 1110 1111 1112 1113 1114 1115 1116 1117 1118 1119 1120 1121 1122 1123 1124 1125 1126 1127 1128 1129 1130 1131 1132 1133 1134 1135 1136 1137 1138 1139 1140 1141 1142 1143 1144 1145 1146 1147 1148 1149 1150 1151 1152 1153 1154 1155 1156 1157 1158 1159 1160 1161 1162 | 1 | 'use strict' /** * certificates.js - CA bundle for SSL communication with RPM. * * This file contains the X509 certificates used to communicate with New Relic * over SSL. */ module.exports = [ // AddTrustExternalCARoot "-----BEGIN CERTIFICATE-----\n" + "MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU\n" + "MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs\n" + "IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290\n" + "MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux\n" + "FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h\n" + "bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v\n" + "dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt\n" + "H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9\n" + "uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX\n" + "mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX\n" + "a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN\n" + "E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0\n" + "WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD\n" + "VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0\n" + 
"Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU\n" + "cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx\n" + "IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN\n" + "AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH\n" + "YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5\n" + "6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC\n" + "Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX\n" + "c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a\n" + "mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=\n" + "-----END CERTIFICATE-----\n", // DigiCertAssuredIDRootCA "-----BEGIN CERTIFICATE-----\n" + "MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl\n" + "MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3\n" + "d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv\n" + "b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG\n" + "EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl\n" + "cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi\n" + "MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c\n" + "JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP\n" + "mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+\n" + "wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4\n" + "VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/\n" + "AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB\n" + "AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW\n" + "BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun\n" + "pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC\n" + "dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf\n" + "fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm\n" + "NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx\n" + 
"H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe\n" + "+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==\n" + "-----END CERTIFICATE-----\n", // DigiCertAssuredIDRootG2 "-----BEGIN CERTIFICATE-----\n" + "MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl\n" + "MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3\n" + "d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv\n" + "b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG\n" + "EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl\n" + "cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi\n" + "MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA\n" + "n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc\n" + "biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp\n" + "EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA\n" + "bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu\n" + "YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB\n" + "AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW\n" + "BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI\n" + "QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I\n" + "0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni\n" + "lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9\n" + "B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv\n" + "ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo\n" + "IhNzbM8m9Yop5w==\n" + "-----END CERTIFICATE-----\n", // DigiCertAssuredIDRootG3 "-----BEGIN CERTIFICATE-----\n" + "MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw\n" + "CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu\n" + "ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg\n" + "RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV\n" + 
"UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu\n" + "Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq\n" + "hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf\n" + "Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q\n" + "RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/\n" + "BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD\n" + "AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY\n" + "JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv\n" + "6pZjamVFkpUBtA==\n" + "-----END CERTIFICATE-----\n", // DigiCertGlobalRootCA "-----BEGIN CERTIFICATE-----\n" + "MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh\n" + "MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3\n" + "d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD\n" + "QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT\n" + "MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j\n" + "b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG\n" + "9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB\n" + "CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97\n" + "nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt\n" + "43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P\n" + "T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4\n" + "gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO\n" + "BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR\n" + "TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw\n" + "DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr\n" + "hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg\n" + "06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF\n" + "PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls\n" + 
"YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk\n" + "CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=\n" + "-----END CERTIFICATE-----\n", // DigiCertGlobalRootG2 "-----BEGIN CERTIFICATE-----\n" + "MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh\n" + "MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3\n" + "d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH\n" + "MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT\n" + "MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j\n" + "b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG\n" + "9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI\n" + "2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx\n" + "1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ\n" + "q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz\n" + "tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ\n" + "vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP\n" + "BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV\n" + "5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY\n" + "1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4\n" + "NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG\n" + "Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91\n" + "8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe\n" + "pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl\n" + "MrY=\n" + "-----END CERTIFICATE-----\n", // DigiCertGlobalRootG3 "-----BEGIN CERTIFICATE-----\n" + "MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw\n" + "CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu\n" + "ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe\n" + "Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw\n" + 
"EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x\n" + "IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF\n" + "K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG\n" + "fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO\n" + "Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd\n" + "BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx\n" + "AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/\n" + "oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8\n" + "sycX\n" + "-----END CERTIFICATE-----\n", // DigiCertHighAssuranceEVRootCA "-----BEGIN CERTIFICATE-----\n" + "MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs\n" + "MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3\n" + "d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j\n" + "ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL\n" + "MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3\n" + "LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug\n" + "RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm\n" + "+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW\n" + "PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM\n" + "xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB\n" + "Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3\n" + "hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg\n" + "EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF\n" + "MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA\n" + "FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec\n" + "nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z\n" + "eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF\n" + "hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2\n" + 
"Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe\n" + "vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep\n" + "+OkuE6N36B9K\n" + "-----END CERTIFICATE-----\n", // DigiCertTrustedRootG4 "-----BEGIN CERTIFICATE-----\n" + "MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi\n" + "MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3\n" + "d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg\n" + "RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV\n" + "UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu\n" + "Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG\n" + "SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y\n" + "ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If\n" + "xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV\n" + "ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO\n" + "DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ\n" + "jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/\n" + "CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi\n" + "EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM\n" + "fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY\n" + "uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK\n" + "chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t\n" + "9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB\n" + "hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD\n" + "ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2\n" + "SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd\n" + "+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc\n" + "fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa\n" + "sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N\n" + 
"cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N\n" + "0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie\n" + "4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI\n" + "r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1\n" + "/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm\n" + "gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+\n" + "-----END CERTIFICATE-----\n", // Equifax_Secure_Certificate_Authority "-----BEGIN CERTIFICATE-----\n" + "MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV\n" + "UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy\n" + "dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1\n" + "MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx\n" + "dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B\n" + "AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f\n" + "BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A\n" + "cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC\n" + "AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ\n" + "MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm\n" + "aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw\n" + "ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj\n" + "IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF\n" + "MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA\n" + "A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y\n" + "7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh\n" + "1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4\n" + "-----END CERTIFICATE-----\n", // GeoTrust_Global_CA "-----BEGIN CERTIFICATE-----\n" + "MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT\n" + "MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i\n" + 
"YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG\n" + "EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg\n" + "R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9\n" + "9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq\n" + "fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv\n" + "iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU\n" + "1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+\n" + "bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW\n" + "MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA\n" + "ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l\n" + "uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn\n" + "Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS\n" + "tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF\n" + "PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un\n" + "hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV\n" + "5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==\n" + "-----END CERTIFICATE-----\n", // GeoTrust_Global_CA2 "-----BEGIN CERTIFICATE-----\n" + "MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW\n" + "MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs\n" + "IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG\n" + "EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg\n" + "R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A\n" + "PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8\n" + "Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL\n" + "TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL\n" + "5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7\n" + "S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe\n" + "2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE\n" + 
"FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap\n" + "EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td\n" + "EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv\n" + "/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN\n" + "A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0\n" + "abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF\n" + "I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz\n" + "4iIprn2DQKi6bA==\n" + "-----END CERTIFICATE----- \n", // GeoTrust_Primary_CA "-----BEGIN CERTIFICATE-----\n" + "MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY\n" + "MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo\n" + "R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx\n" + "MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK\n" + "Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp\n" + "ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC\n" + "AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9\n" + "AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA\n" + "ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0\n" + "7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W\n" + "kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI\n" + "mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G\n" + "A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ\n" + "KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1\n" + "6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl\n" + "4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K\n" + "oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj\n" + "UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU\n" + "AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=\n" + "-----END CERTIFICATE-----\n", // GeoTrust_Primary_CA_G2_ECC 
"-----BEGIN CERTIFICATE-----\n" + "MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL\n" + "MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj\n" + "KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2\n" + "MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0\n" + "eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV\n" + "BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw\n" + "NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV\n" + "BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH\n" + "MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL\n" + "So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal\n" + "tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO\n" + "BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG\n" + "CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT\n" + "qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz\n" + "rD6ogRLQy7rQkgu2npaqBA+K\n" + "-----END CERTIFICATE-----\n", // GeoTrust_Universal_CA "-----BEGIN CERTIFICATE-----\n" + "MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW\n" + "MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy\n" + "c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE\n" + "BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0\n" + "IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV\n" + "VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8\n" + "cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT\n" + "QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh\n" + "F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v\n" + "c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w\n" + "mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd\n" + "VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX\n" 
+ "teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ\n" + "f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe\n" + "Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+\n" + "nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB\n" + "/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY\n" + "MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG\n" + "9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc\n" + "aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX\n" + "IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn\n" + "ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z\n" + "uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN\n" + "Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja\n" + "QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW\n" + "koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9\n" + "ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt\n" + "DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm\n" + "bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=\n" + "-----END CERTIFICATE----- \n", // GeoTrust_Universal_CA2 "-----BEGIN CERTIFICATE-----\n" + "MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW\n" + "MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy\n" + "c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD\n" + "VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1\n" + "c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC\n" + "AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81\n" + "WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG\n" + "FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq\n" + "XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL\n" + 
"se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb\n" + "KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd\n" + "IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73\n" + "y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt\n" + "hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc\n" + "QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4\n" + "Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV\n" + "HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV\n" + "HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ\n" + "KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z\n" + "dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ\n" + "L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr\n" + "Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo\n" + "ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY\n" + "T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz\n" + "GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m\n" + "1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV\n" + "OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH\n" + "6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX\n" + "QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS\n" + "-----END CERTIFICATE-----\n", // GlobalSign_Root_CA "-----BEGIN CERTIFICATE-----\n" + "MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG\n" + "A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv\n" + "b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw\n" + "MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i\n" + "YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT\n" + "aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ\n" + 
"jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp\n" + "xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp\n" + "1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG\n" + "snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ\n" + "U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8\n" + "9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E\n" + "BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B\n" + "AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz\n" + "yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE\n" + "38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP\n" + "AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad\n" + "DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME\n" + "HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==\n" + "-----END CERTIFICATE-----\n", // GlobalSign_Root_CA_ECC_R4 "-----BEGIN CERTIFICATE-----\n" + "MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk\n" + "MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH\n" + "bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX\n" + "DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD\n" + "QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu\n" + "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ\n" + "FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw\n" + "DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F\n" + "uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX\n" + "kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs\n" + "ewv4n4Q=\n" + "-----END CERTIFICATE-----\n", // GlobalSign_Root_CA_ECC_R5 "-----BEGIN CERTIFICATE-----\n" + "MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk\n" + "MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH\n" + 
"bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX\n" + "DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD\n" + "QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu\n" + "MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc\n" + "8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke\n" + "hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD\n" + "VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI\n" + "KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg\n" + "515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO\n" + "xwy8p2Fp8fc74SrL+SvzZpA3\n" + "-----END CERTIFICATE-----\n", // GlobalSign_Root_CA_R3 "-----BEGIN CERTIFICATE-----\n" + "MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G\n" + "A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp\n" + "Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4\n" + "MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG\n" + "A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI\n" + "hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8\n" + "RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT\n" + "gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm\n" + "KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd\n" + "QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ\n" + "XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw\n" + "DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o\n" + "LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU\n" + "RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp\n" + "jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK\n" + "6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX\n" + "mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs\n" + 
"Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH\n" + "WD9f\n" + "-----END CERTIFICATE-----\n", // GlobalSign_Root_CA_RC2 "-----BEGIN CERTIFICATE-----\n" + "MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G\n" + "A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp\n" + "Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1\n" + "MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG\n" + "A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI\n" + "hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL\n" + "v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8\n" + "eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq\n" + "tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd\n" + "C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa\n" + "zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB\n" + "mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH\n" + "V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n\n" + "bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG\n" + "3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs\n" + "J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO\n" + "291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS\n" + "ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd\n" + "AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7\n" + "TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==\n" + "-----END CERTIFICATE-----\n", // VeriSign-PCA-2G2 "-----BEGIN CERTIFICATE-----\n" + "MIIDAzCCAmwCEQC5L2DMiJ+hekYJuFtwbIqvMA0GCSqGSIb3DQEBBQUAMIHBMQsw\n" + "CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0Ns\n" + "YXNzIDIgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH\n" + "MjE6MDgGA1UECxMxKGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9y\n" + 
"aXplZCB1c2Ugb25seTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazAe\n" + "Fw05ODA1MTgwMDAwMDBaFw0yODA4MDEyMzU5NTlaMIHBMQswCQYDVQQGEwJVUzEX\n" + "MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0NsYXNzIDIgUHVibGlj\n" + "IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBHMjE6MDgGA1UECxMx\n" + "KGMpIDE5OTggVmVyaVNpZ24sIEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s\n" + "eTEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazCBnzANBgkqhkiG9w0B\n" + "AQEFAAOBjQAwgYkCgYEAp4gBIXQs5xoD8JjhlzwPIQjxnNuX6Zr8wgQGE75fUsjM\n" + "HiwSViy4AWkszJkfrbCWrnkE8hM5wXuYuggs6MKEEyyqaekJ9MepAqRCwiNPStjw\n" + "DqL7MWzJ5m+ZJwf15vRMeJ5t60aG+rmGyVTyssSv1EYcWskVMP8NbPUtDm3Of3cC\n" + "AwEAATANBgkqhkiG9w0BAQUFAAOBgQByLvl/0fFx+8Se9sVeUYpAmLho+Jscg9ji\n" + "nb3/7aHmZuovCfTK1+qlK5X2JGCGTUQug6XELaDTrnhpb3LabK4I8GOSN+a7xDAX\n" + "rXfMSTWqz9iP0b63GJZHc2pUIjRkLbYWm1lbtFFZOrMLFPQS32eg9K0yZF6xRnIn\n" + "jBJ7xUS0rg==\n" + "-----END CERTIFICATE-----\n", // VeriSign-PCA-2G3 "-----BEGIN CERTIFICATE-----\n" + "MIIEGTCCAwECEGFwy0mMX5hFKeewptlQW3owDQYJKoZIhvcNAQEFBQAwgcoxCzAJ\n" + "BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVy\n" + "aVNpZ24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24s\n" + "IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNp\n" + "Z24gQ2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0\n" + "eSAtIEczMB4XDTk5MTAwMTAwMDAwMFoXDTM2MDcxNjIzNTk1OVowgcoxCzAJBgNV\n" + "BAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVyaVNp\n" + "Z24gVHJ1c3QgTmV0d29yazE6MDgGA1UECxMxKGMpIDE5OTkgVmVyaVNpZ24sIElu\n" + "Yy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTFFMEMGA1UEAxM8VmVyaVNpZ24g\n" + "Q2xhc3MgMiBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAt\n" + "IEczMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArwoNwtUs22e5LeWU\n" + "J92lvuCwTY+zYVY81nzD9M0+hsuiiOLh2KRpxbXiv8GmR1BeRjmL1Za6tW8UvxDO\n" + "JxOeBUebMXoT2B/Z0wI3i60sR/COgQanDTAM6/c8DyAd3HJG7qUCyFvDyVZpTMUY\n" + "wZF7C9UTAJu878NIPkZgIIUq1ZC2zYugzDLdt/1AVbJQHFauzI13TccgTacxdu9o\n" + 
"koqQHgiBVrKtaaNS0MscxCM9H5n+TOgWY47GCI72MfbS+uV23bUckqNJzc0BzWjN\n" + "qWm6o+sdDZykIKbBoMXRRkwXbdKsZj+WjOCE1Db/IlnF+RFgqF8EffIa9iVCYQ/E\n" + "Srg+iQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQA0JhU8wI1NQ0kdvekhktdmnLfe\n" + "xbjQ5F1fdiLAJvmEOjr5jLX77GDx6M4EsMjdpwOPMPOY36TmpDHf0xwLRtxyID+u\n" + "7gU8pDM/CzmscHhzS5kr3zDCVLCoO1Wh/hYozUK9dG6A2ydEp85EXdQbkJgNHkKU\n" + "sQAsBNB0owIFImNjzYO1+8FtYmtpdf1dcEG59b98377BMnMiIYtYgXsVkXq642RI\n" + "sH/7NiXaldDxJBQX3RiAa0YjOVT1jmIJBB2UkKab5iXiQkWquJCtvgiPqQtCGJTP\n" + "cjnhsUPgKM+351psE2tJs//jGHyJizNdrDPXp/naOlXJWBD5qu9ats9LS98q\n" + "-----END CERTIFICATE-----\n", // VeriSign-PCA-3 "-----BEGIN CERTIFICATE-----\n" + "MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG\n" + "A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz\n" + "cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2\n" + "MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV\n" + "BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt\n" + "YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN\n" + "ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE\n" + "BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is\n" + "I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G\n" + "CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i\n" + "2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ\n" + "2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ\n" + "-----END CERTIFICATE-----\n", // VeriSign-PCA-3G2 "-----BEGIN CERTIFICATE-----\n" + "MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ\n" + "BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh\n" + "c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy\n" + "MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp\n" + "emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X\n" + 
"DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw\n" + "FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg\n" + "UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo\n" + "YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5\n" + "MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB\n" + "AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4\n" + "pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0\n" + "13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID\n" + "AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk\n" + "U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i\n" + "F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY\n" + "oJ2daZH9\n" + "-----END CERTIFICATE-----\n", // VeriSign-PCA-3G3 "-----BEGIN CERTIFICATE-----\n" + "MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw\n" + "CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl\n" + "cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu\n" + "LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT\n" + "aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp\n" + "dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD\n" + "VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT\n" + "aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ\n" + "bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu\n" + "IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg\n" + "LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b\n" + "N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t\n" + "KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu\n" + "kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm\n" + "CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ\n" + 
"Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu\n" + "imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te\n" + "2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe\n" + "DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC\n" + "/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p\n" + "F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt\n" + "TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==\n" + "-----END CERTIFICATE-----\n", // VeriSign-PCA-3G4 "-----BEGIN CERTIFICATE-----\n" + "MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL\n" + "MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW\n" + "ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln\n" + "biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp\n" + "U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y\n" + "aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG\n" + "A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp\n" + "U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg\n" + "SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln\n" + "biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5\n" + "IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm\n" + "GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve\n" + "fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw\n" + "AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ\n" + "aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj\n" + "aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW\n" + "kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC\n" + "4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga\n" + "FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==\n" + "-----END CERTIFICATE-----\n", // VeriSign-PCA-3G5 "-----BEGIN 
CERTIFICATE-----\n" + "MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB\n" + "yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL\n" + "ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp\n" + "U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW\n" + "ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0\n" + "aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL\n" + "MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW\n" + "ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln\n" + "biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp\n" + "U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y\n" + "aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1\n" + "nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex\n" + "t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz\n" + "SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG\n" + "BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+\n" + "rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/\n" + "NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E\n" + "BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH\n" + "BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy\n" + "aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv\n" + "MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE\n" + "p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y\n" + "5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK\n" + "WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ\n" + "4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N\n" + "hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq\n" + "-----END CERTIFICATE-----\n", // VeriSign-PCA-4G3 "-----BEGIN CERTIFICATE-----\n" + 
"MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw\n" + "CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl\n" + "cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu\n" + "LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT\n" + "aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp\n" + "dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD\n" + "VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT\n" + "aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ\n" + "bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu\n" + "IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg\n" + "LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1\n" + "GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ\n" + "+mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd\n" + "U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm\n" + "NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY\n" + "ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/\n" + "ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1\n" + "CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq\n" + "g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm\n" + "fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c\n" + "2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/\n" + "bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==\n" + "-----END CERTIFICATE-----\n", // VeriSign-PCA-universal "-----BEGIN CERTIFICATE-----\n" + "MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB\n" + "vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL\n" + "ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp\n" + "U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W\n" + 
"ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe\n" + "Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX\n" + "MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0\n" + "IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y\n" + "IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh\n" + "bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF\n" + "AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF\n" + "9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH\n" + "H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H\n" + "LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN\n" + "/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT\n" + "rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud\n" + "EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw\n" + "WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs\n" + "exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud\n" + "DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4\n" + "sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+\n" + "seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz\n" + "4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+\n" + "BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR\n" + "lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3\n" + "7M2CYfE45k+XmCpajQ==\n" + "-----END CERTIFICATE-----\n", // gd-class2-root "-----BEGIN CERTIFICATE-----\n" + "MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh\n" + "MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE\n" + "YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3\n" + "MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo\n" + "ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg\n" + 
"MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN\n" + "ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA\n" + "PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w\n" + "wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi\n" + "EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY\n" + "avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+\n" + "YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE\n" + "sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h\n" + "/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5\n" + "IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj\n" + "YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD\n" + "ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy\n" + "OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P\n" + "TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ\n" + "HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER\n" + "dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf\n" + "ReYNnyicsbkqWletNw+vHX/bvZ8=\n" + "-----END CERTIFICATE-----\n", // gdroot-g2 "-----BEGIN CERTIFICATE-----\n" + "MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx\n" + "EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT\n" + "EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp\n" + "ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz\n" + "NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH\n" + "EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE\n" + "AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw\n" + "DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD\n" + "E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH\n" + "/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy\n" + 
"DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh\n" + "GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR\n" + "tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA\n" + "AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE\n" + "FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX\n" + "WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu\n" + "9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr\n" + "gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo\n" + "2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO\n" + "LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI\n" + "4uJEvlz36hz1\n" + "-----END CERTIFICATE-----\n", // sf-class2-root "-----BEGIN CERTIFICATE-----\n" + "MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl\n" + "MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp\n" + "U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw\n" + "NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE\n" + "ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp\n" + "ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3\n" + "DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf\n" + "8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN\n" + "+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0\n" + "X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa\n" + "K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA\n" + "1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G\n" + "A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR\n" + "zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0\n" + "YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD\n" + "bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w\n" + 
"DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3\n" + "L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D\n" + "eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl\n" + "xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp\n" + "VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY\n" + "WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=\n" + "-----END CERTIFICATE-----\n", // sfroot-g2 "-----BEGIN CERTIFICATE-----\n" + "MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx\n" + "EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT\n" + "HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs\n" + "ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw\n" + "MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6\n" + "b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj\n" + "aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp\n" + "Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC\n" + "ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg\n" + "nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1\n" + "HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N\n" + "Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN\n" + "dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0\n" + "HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO\n" + "BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G\n" + "CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU\n" + "sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3\n" + "4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg\n" + "8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K\n" + "pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1\n" + "mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0\n" + "-----END CERTIFICATE-----\n", // 
sfsroot-g2 "-----BEGIN CERTIFICATE-----\n" + "MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx\n" + "EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT\n" + "HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs\n" + "ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5\n" + "MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD\n" + "VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy\n" + "ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy\n" + "dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI\n" + "hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p\n" + "OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2\n" + "8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K\n" + "Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe\n" + "hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk\n" + "6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw\n" + "DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q\n" + "AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI\n" + "bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB\n" + "ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z\n" + "qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd\n" + "iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn\n" + "0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN\n" + "sSi6\n" + "-----END CERTIFICATE-----\n", // sfsroot "-----BEGIN CERTIFICATE-----\n" + "MIIEfjCCA2agAwIBAgIBADANBgkqhkiG9w0BAQUFADCBzzELMAkGA1UEBhMCVVMx\n" + "EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT\n" + "HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOjA4BgNVBAsTMWh0dHA6Ly9j\n" + "ZXJ0aWZpY2F0ZXMuc3RhcmZpZWxkdGVjaC5jb20vcmVwb3NpdG9yeS8xNjA0BgNV\n" + "BAMTLVN0YXJmaWVsZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0\n" + 
"eTAeFw0wODA2MDIwMDAwMDBaFw0yOTEyMzEyMzU5NTlaMIHPMQswCQYDVQQGEwJV\n" + "UzEQMA4GA1UECBMHQXJpem9uYTETMBEGA1UEBxMKU2NvdHRzZGFsZTElMCMGA1UE\n" + "ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjE6MDgGA1UECxMxaHR0cDov\n" + "L2NlcnRpZmljYXRlcy5zdGFyZmllbGR0ZWNoLmNvbS9yZXBvc2l0b3J5LzE2MDQG\n" + "A1UEAxMtU3RhcmZpZWxkIFNlcnZpY2VzIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9y\n" + "aXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA8sxWKk3mFjdal+pt\n" + "NTjREJvbuNypBAmVMy4JxQB7GnhCj8j0BY7+0miDHk6ZzRfbRz5Q84nS59yY+wX4\n" + "qtZj9FRNwXEDsB8bdrMaNDBz8SgyYIP9tJzXttIiN3wZqjveExBpblwG02+j8mZa\n" + "dkJIr4DRVFk91LnU2+25qzmZ9O5iq+F4cnvYOI1AtszcEgBwQ4Vp2Bjjyldyn7Tf\n" + "P/wiqEJS9XdbmfBWLSZwFjYSwieeV6Z80CPxedyjk1goOD2frTZD7jf7+PlDrchW\n" + "8pQSXkLrc7gTDcum1Ya5qihqVAOhPw8p6wkA6D9eon8XPaEr+L7QdR2khOOrF2UG\n" + "UgCvsQIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd\n" + "BgNVHQ4EFgQUtMZ/GkPMm3VdL8RL8ouYEOnxURAwHwYDVR0jBBgwFoAUtMZ/GkPM\n" + "m3VdL8RL8ouYEOnxURAwDQYJKoZIhvcNAQEFBQADggEBAKyAu8QlBQtYpOR+KX6v\n" + "vDvsLcBELvmR4NI7MieQLfaACVzCq2Uk2jgQRsRJ0v2aqyhId4jG6W/RR5HVNU8U\n" + "CahbQAcdfHFWy4lC1L9hwCL3Lt+r83JDi0DolOuwJtrRE9Or0DYtLjqVs3cuFTkY\n" + "DGm6qoDt8VNOM5toBOKgMC7X0V3UpmadhObnuzyJuzad/BepPVUrivubxEyE/9/S\n" + "vmkbdLCo9uqwnLIpdIFMaDqaf3MlOfUT4GaRadRXS7furUXgLMOI076USYkf/3DV\n" + "W205E7Ady5jmZ2MNY/b7w9dhcoOIP3B+U8meiVTWT399cbmu8WCLd2Ds+L/6aqOc\n" + "ASI=\n" + "-----END CERTIFICATE-----\n", // thawte_Premium_Server_CA "-----BEGIN CERTIFICATE-----\n" + "MIIDNjCCAp+gAwIBAgIQNhIilsXjOKUgodJfTNcJVDANBgkqhkiG9w0BAQUFADCB\n" + "zjELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJ\n" + "Q2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UE\n" + "CxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhh\n" + "d3RlIFByZW1pdW0gU2VydmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNl\n" + "cnZlckB0aGF3dGUuY29tMB4XDTk2MDgwMTAwMDAwMFoXDTIxMDEwMTIzNTk1OVow\n" + "gc4xCzAJBgNVBAYTAlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcT\n" + 
"CUNhcGUgVG93bjEdMBsGA1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNV\n" + "BAsTH0NlcnRpZmljYXRpb24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRo\n" + "YXd0ZSBQcmVtaXVtIFNlcnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1z\n" + "ZXJ2ZXJAdGhhd3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2\n" + "aovXwlue2oFBYo847kkEVdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560\n" + "ZXUCTe/LCaIhUdib0GfQug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j\n" + "+ao6hnO2RlNYyIkFvYMRuHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/\n" + "BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOBgQBlkKyID1bZ5jA01CbH0FDxkt5r1DmI\n" + "CSLGpmODA/eZd9iy5Ri4XWPz1HP7bJyZePFLeH0ZJMMrAoT4vCLZiiLXoPxx7JGH\n" + "IPG47LHlVYCsPVLIOQ7C8MAFT9aCdYy9X9LcdpoFEsmvcsPcJX6kTY4XpeCHf+Ga\n" + "WuFg3GQjPEIuTQ==\n" + "-----END CERTIFICATE-----\n", // thawte_Primary_Root_CA-G2_ECC "-----BEGIN CERTIFICATE-----\n" + "MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL\n" + "MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp\n" + "IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi\n" + "BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw\n" + "MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh\n" + "d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig\n" + "YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v\n" + "dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/\n" + "BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6\n" + "papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E\n" + "BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K\n" + "DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3\n" + "KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox\n" + "XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==\n" + "-----END CERTIFICATE-----\n", // thawte_Primary_Root_CA-G3_SHA256 "-----BEGIN CERTIFICATE-----\n" + "MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB\n" + 
"rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf\n" + "Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw\n" + "MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV\n" + "BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa\n" + "Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl\n" + "LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u\n" + "MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl\n" + "ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz\n" + "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm\n" + "gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8\n" + "YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf\n" + "b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9\n" + "9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S\n" + "zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk\n" + "OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV\n" + "HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA\n" + "2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW\n" + "oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu\n" + "t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c\n" + "KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM\n" + "m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu\n" + "MdRAGmI0Nj81Aa6sY6A=\n" + "-----END CERTIFICATE-----\n", // thawte_Primary_Root_CA "-----BEGIN CERTIFICATE-----\n" + "MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB\n" + "qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf\n" + "Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw\n" + "MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV\n" + "BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw\n" + 
"NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j\n" + "LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG\n" + "A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl\n" + "IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG\n" + "SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs\n" + "W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta\n" + "3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk\n" + "6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6\n" + "Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J\n" + "NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA\n" + "MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP\n" + "r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU\n" + "DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz\n" + "YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX\n" + "xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2\n" + "/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/\n" + "LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7\n" + "jVaMaA==\n" + "-----END CERTIFICATE-----\n", // thawte_Server_CA "-----BEGIN CERTIFICATE-----\n" + "MIIDIjCCAougAwIBAgIQNKT/9jCvTKU8MxdCoZRmdTANBgkqhkiG9w0BAQUFADCB\n" + "xDELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJ\n" + "Q2FwZSBUb3duMR0wGwYDVQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UE\n" + "CxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhh\n" + "d3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0\n" + "ZS5jb20wHhcNOTYwODAxMDAwMDAwWhcNMjEwMTAxMjM1OTU5WjCBxDELMAkGA1UE\n" + "BhMCWkExFTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3du\n" + "MR0wGwYDVQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlm\n" + "aWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZl\n" + 
"ciBDQTEmMCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8w\n" + "DQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl\n" + "/Kj0R1HahbUgdJSGHg91yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF\n" + "/rFrKbYvScg71CcEJRCXL+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982\n" + "OsK1ZiIS1ocNAgMBAAGjEzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEF\n" + "BQADgYEAvkBpQW/G28GnvwfAReTQtUMeTJUzNelewj4o9qgNUNX/4gwP/FACjq6R\n" + "ua00io2fJ3GqGcxL6ATK1BdrEhrWxl/WzV7/iXa/2EjYWb0IiokdV81FHlK6EpqE\n" + "+hiJX+j5MDVqAWC5mYCDhQpu2vTJj15zLTFKY6B08h+LItIpPus=\n" + "-----END CERTIFICATE-----\n", // trustcenter_TC_Universal_CA_III "-----BEGIN CERTIFICATE-----\n" + "MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL\n" + "MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV\n" + "BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1\n" + "c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy\n" + "MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl\n" + "ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm\n" + "BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG\n" + "SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF\n" + "5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv\n" + "DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v\n" + "zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT\n" + "yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj\n" + "dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh\n" + "MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB\n" + "Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI\n" + "4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz\n" + "dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY\n" + "aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G\n" + 
"DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV\n" + "CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH\n" + "LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg==\n" + "-----END CERTIFICATE-----\n", // trustcenter_Universal_CA-I "-----BEGIN CERTIFICATE-----\n" + "MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL\n" + "MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV\n" + "BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1\n" + "c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx\n" + "MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg\n" + "R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD\n" + "VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN\n" + "AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR\n" + "JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T\n" + "fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu\n" + "jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z\n" + "wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ\n" + "fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD\n" + "VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO\n" + "BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G\n" + "CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1\n" + "7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn\n" + "8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs\n" + "ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT\n" + "ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/\n" + "2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY\n" + "-----END CERTIFICATE-----\n", // trustcenter_Universal_CA-II "-----BEGIN CERTIFICATE-----\n" + "MIIF3zCCA8egAwIBAgIOGTMAAQACKBqaBLzyVUUwDQYJKoZIhvcNAQEFBQAwejEL\n" + 
"MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV\n" + "BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEnMCUGA1UEAxMeVEMgVHJ1\n" + "c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJMB4XDTA2MDMyMjE1NTgzNFoXDTMwMTIz\n" + "MTIyNTk1OVowejELMAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVy\n" + "IEdtYkgxJDAiBgNVBAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEnMCUG\n" + "A1UEAxMeVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJMIICIjANBgkqhkiG\n" + "9w0BAQEFAAOCAg8AMIICCgKCAgEAi9R3azRs5TbYalxeOO781R15Azt7g2JEgk6I\n" + "7d6D/+7MUGIFBZWZdpj2ufJf2AaRksL2LWYXH/1TA+iojWOpbuHWG4y8mLOLO9Tk\n" + "Lsp9hUkmW3m4GotAnn+7yT9jLM/RWny6KCJBElpN+Rd3/IX9wkngKhh/6aAsnPlE\n" + "/AxoOUL1JwW+jhV6YJ3wO8c85j4WvK923mq3ouGrRkXrjGV90ZfzlxElq1nroCLZ\n" + "gt2Y7X7i+qBhCkoy3iwX921E6oFHWZdXNwM53V6CItQzuPomCba8OYgvURVOm8M7\n" + "3xOCiN1LNPIz1pDp81PcNXzAw9l8eLPNcD+NauCjgUjkKa1juPD8KGQ7mbN9/pqd\n" + "iPaZIgiRRxaJNXhdd6HPv0nh/SSUK2k2e+gc5iqQilvVOzRZQtxtz7sPQRxVzfUN\n" + "Wy4WIibvYR6X/OJTyM9bo8ep8boOhhLLE8oVx+zkNo3aXBM9ZdIOXXB03L+PemrB\n" + "Lg/Txl4PK1lszGFs/sBhTtnmT0ayWuIZFHCE+CAA7QGnl37DvRJckiMXoKUdRRcV\n" + "I5qSCLUiiI3cKyTr4LEXaNOvYb3ZhXj2jbp4yjeNY77nrB/fpUcJucglMVRGURFV\n" + "DYlcjdrSGC1z8rjVJ/VIIjfRYvd7Dcg4i6FKsPzQ8eu3hmPn4A5zf/1yUbXpfeJV\n" + "BWR4Z38CAwEAAaNjMGEwHwYDVR0jBBgwFoAUzdeQoW6jv9sw1toyJZAM5jkegGUw\n" + "DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFM3XkKFu\n" + "o7/bMNbaMiWQDOY5HoBlMA0GCSqGSIb3DQEBBQUAA4ICAQB+FojoEw42zG4qhQc4\n" + "xlaJeuNHIWZMUAgxWlHQ/KZeFHXeTDvs8e3MfhEHSmHu6rOOOqQzxu2KQmZP8Tx7\n" + "yaUFQZmx7Cxb7tyW0ohTS3g0uW7muw/FeqZ8Dhjfbw90TNGp8aHp2FRkzF6WeKJW\n" + "GsFzshXGVwXf2vdIJIqOf2qp+U3pPmrOYCx9LZAI9mOPFdAtnIz/8f38DBZQVhT7\n" + "upeG7rRJA1TuG1l/MDoCgoYhrv7wFfLfToPmmcW6NfcgkIw47XXP4S73BDD7Ua2O\n" + "giRAyn0pXdXZ92Vk/KqfdLh9kl3ShCngE+qK99CrxK7vFcXCifJ7tjtJmGHzTnKR\n" + "N4xJkunI7Cqg90lufA0kxmts8jgvynAF5X/fxisrgIDV2m/LQLvYG/AkyRDIRAJ+\n" + "LtOYqqIN8SvQ2vqOHP9U6OFKbt2o1ni1N6WsZNUUI8cOpevhCTjXwHxgpV2Yj4wC\n" + "1dxWqPNNWKkL1HxkdAEy8t8PSoqpAqKiHYR3wvHMl700GXRd4nQ+dSf3r7/ufA5t\n" + 
"VIimVuImrTESPB5BeW0X6hNeH/Vcn0lZo7Ivo0LD+qh+v6WfSMlgYmIK371F3uNC\n" + "tVGW/cT1Gpm4UqJEzS1hjBWPgdVdotSQPYxuQGHDWV3Y2eH2dEcieXR92sqjbzcV\n" + "NvAsGnE8EXbfXRo+VGN4a2V+Hw==\n" + "-----END CERTIFICATE-----\n", // trustcenter_class_2_ii "-----BEGIN CERTIFICATE-----\n" + "MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL\n" + "MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV\n" + "BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0\n" + "Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1\n" + "OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i\n" + "SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc\n" + "VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD\n" + "ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf\n" + "tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg\n" + "uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J\n" + "XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK\n" + "8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99\n" + "5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud\n" + "EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3\n" + "kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy\n" + "dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6\n" + "Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz\n" + "JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290\n" + "Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u\n" + "TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS\n" + "GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt\n" + "ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8\n" + "au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV\n" + "hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI\n" + 
"dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ==\n" + "-----END CERTIFICATE-----\n", // trustcenter_class_3_ii "-----BEGIN CERTIFICATE-----\n" + "MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL\n" + "MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV\n" + "BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0\n" + "Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1\n" + "OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i\n" + "SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc\n" + "VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD\n" + "ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW\n" + "Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q\n" + "Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2\n" + "1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq\n" + "ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1\n" + "Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud\n" + "EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX\n" + "XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy\n" + "dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6\n" + "Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz\n" + "JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290\n" + "Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u\n" + "TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN\n" + "irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8\n" + "TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6\n" + "g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB\n" + "95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj\n" + "S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A==\n" + "-----END CERTIFICATE-----\n", // trustcenter_class_4_ii 
"-----BEGIN CERTIFICATE-----\n" + "MIIDtjCCAp6gAwIBAgIOBcAAAQACQdAGCk3OdRAwDQYJKoZIhvcNAQEFBQAwdjEL\n" + "MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV\n" + "BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDQgQ0ExJTAjBgNVBAMTHFRDIFRydXN0\n" + "Q2VudGVyIENsYXNzIDQgQ0EgSUkwHhcNMDYwMzIzMTQxMDIzWhcNMjUxMjMxMjI1\n" + "OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i\n" + "SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgNCBDQTElMCMGA1UEAxMc\n" + "VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgNCBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD\n" + "ggEPADCCAQoCggEBALXNTJytrlG7fEjFDSmGehSt2VA9CXIgDRS2Y8b+WJ7gIV7z\n" + "jyIZ3E6RIM1viCmis8GsKnK6i1S4QF/yqvhDhsIwXMynXX/GCEnkDjkvjhjWkd0j\n" + "FnmA22xIHbzB3ygQY9GB493fL3l1oht48pQB5hBiecugfQLANIJ7x8CtHUzXapZ2\n" + "W78mhEj9h/aECqqSB5lIPGG8ToVYx5ct/YFKocabEvVCUNFkPologiJw3fX64yhC\n" + "L04y87OjNopq1mJcrPoBbbTgci6VaLTxkwzGioLSHVPqfOA/QrcSWrjN2qUGZ8uh\n" + "d32llvCSHmcOHUJG5vnt+0dTf1cERh9GX8eu4I8CAwEAAaNCMEAwDwYDVR0TAQH/\n" + "BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFB/quz4lGwa9pd1iBX7G\n" + "TFq/6A9DMA0GCSqGSIb3DQEBBQUAA4IBAQBYpCubTPfkpJKknGWYGWIi/HIy6QRd\n" + "xMRwLVpG3kxHiiW5ot3u6hKvSI3vK2fbO8w0mCr3CEf/Iq978fTr4jgCMxh1KBue\n" + "dmWsiANy8jhHHYz1nwqIUxAUu4DlDLNdjRfuHhkcho0UZ3iMksseIUn3f9MYv5x5\n" + "+F0IebWqak2SNmy8eesOPXmK2PajVnBd3ttPedJ60pVchidlvqDTB4FAVd0Qy+BL\n" + "iILAkH0457+W4Ze6mqtCD9Of2J4VMxHL94J59bXAQVaS4d9VA61Iz9PyLrHHLVZM\n" + "ZHQqMc7cdalUR6SnQnIJ5+ECpkeyBM1CE+FhDOB4OiIgohxgQoaH96Xm\n" + "-----END CERTIFICATE-----\n" ] |
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| parse-sql.js | 36.84% | (7 / 19) | 0% | (0 / 8) | 0% | (0 / 1) | 38.89% | (7 / 18) | |
| parsed-statement.js | 13.51% | (5 / 37) | 0% | (0 / 18) | 0% | (0 / 2) | 13.89% | (5 / 36) | |
| statement-matcher.js | 50% | (6 / 12) | 0% | (0 / 4) | 50% | (1 / 2) | 50% | (6 / 12) | |
| tracer.js | 22.58% | (28 / 124) | 0% | (0 / 48) | 0% | (0 / 18) | 23.73% | (28 / 118) | |
| util.js | 50% | (2 / 4) | 0% | (0 / 3) | 0% | (0 / 1) | 50% | (2 / 4) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 | 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../logger').child({component: 'parse_sql'})
var StatementMatcher = require('./statement-matcher')
var ParsedStatement = require('./parsed-statement')
var stringifySync = require('../util/safe-json').stringifySync
// Matchers for the statement types we can extract a target table from.
// Capture group 1 of each pattern is the first table identifier; the trailing
// `.*` consumes the remainder of the statement. All patterns are anchored at
// the start and case-insensitive. Matchers are tried in array order, first
// match wins (see parseSql below).
var OPERATIONS = [
  new StatementMatcher('select', /^\s*select[\S\s]*from[\s\[]+([^\]\s,)(;]*).*/gi),
  new StatementMatcher('update', /^\s*update\s+([^\s,;]*).*/gi),
  new StatementMatcher('insert', /^\s*insert(?:\s+ignore)?\s+into\s+([^\s(,;]*).*/gi),
  new StatementMatcher('delete', /^\s*delete\s+from\s+([^\s,(;]*).*/gi)
]
var COMMENT_PATTERN = /\/\\*.*?\\*\//
// This must be called syncronously after the initial db call for backtraces to
// work correctly
module.exports = function parseSql(type, sql) {
// Sometimes we get an object here from MySQL. We have been unable to
// reproduce it, so we'll just log what that object is and return a statement
// type of `other`.
if (typeof sql === 'object' && sql.sql !== undefined) sql = sql.sql
if (typeof sql !== 'string') {
logger.trace(
'parseSQL got an a non-string sql that looks like: %s',
stringifySync(sql)
)
return new ParsedStatement(type, 'other', null, sql)
}
sql = sql.replace(COMMENT_PATTERN, '').trim()
var parsedStatement
for (var i = 0, l = OPERATIONS.length; i < l; i++) {
parsedStatement = OPERATIONS[i].getParsedStatement(type, sql)
if (parsedStatement) {
return parsedStatement
}
}
return new ParsedStatement(type, 'other', null, sql)
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 | 1 1 1 1 1 | 'use strict'
var DB = require('../metrics/names').DB
var ALL = require('../metrics/names').ALL
/**
 * Value object describing a parsed database statement.
 *
 * @param {string}  type      Datastore type (e.g. 'MySQL').
 * @param {string}  operation Operation name ('select', 'update', ...).
 * @param {?string} model     Table/collection the statement targets, if known.
 * @param {*}       raw       Raw query; only retained when it is a string.
 */
function ParsedStatement(type, operation, model, raw) {
  this.type = type
  this.operation = operation
  this.model = model

  // Capture a backtrace (and keep the raw query text) only for real string
  // queries; anything else gets the inert defaults.
  var isStringQuery = typeof raw === 'string'
  this.trace = isStringQuery ? new Error() : null
  this.raw = isStringQuery ? raw : ''
}
/**
 * Record rollup, scoped, and instance metrics for a finished datastore
 * segment, rename the segment, and enqueue the raw query with the slow-query
 * tracer.
 *
 * NOTE(review): assumes `segment.transaction` is always set by the time this
 * runs — confirm against callers.
 *
 * @param {object}  segment The datastore trace segment being recorded.
 * @param {?string} scope   Transaction name to scope breakdown metrics to, or
 *                          falsy for unscoped rollups only.
 */
ParsedStatement.prototype.recordMetrics = function recordMetrics(segment, scope) {
  var duration = segment.getDurationInMillis()
  var exclusive = segment.getExclusiveDurationInMillis()
  var transaction = segment.transaction
  // Web vs. background transactions roll up under different metric prefixes.
  var type = transaction.isWeb() ? DB.WEB : DB.OTHER
  var thisTypeSlash = this.type + '/'
  var operation = DB.OPERATION + '/' + thisTypeSlash + this.operation

  // Rollups
  transaction.measure(operation, null, duration, exclusive)
  transaction.measure(DB.PREFIX + type, null, duration, exclusive)
  transaction.measure(DB.PREFIX + thisTypeSlash + type, null, duration, exclusive)
  transaction.measure(DB.PREFIX + thisTypeSlash + ALL, null, duration, exclusive)
  transaction.measure(DB.ALL, null, duration, exclusive)

  // If we can parse the SQL statement, create a 'statement' metric, and use it
  // as the scoped metric for transaction breakdowns. Otherwise, skip the
  // 'statement' metric and use the 'operation' metric as the scoped metric for
  // transaction breakdowns.
  if (this.model) {
    var model = DB.STATEMENT + '/' + thisTypeSlash + this.model + '/' + this.operation
    transaction.measure(model, null, duration, exclusive)
    if (scope) transaction.measure(model, scope, duration, exclusive)
  } else if (scope) {
    transaction.measure(operation, scope, duration, exclusive)
  }

  // This recorder is side-effectful. Because we are depending on the recorder
  // setting the transaction name, recorders must always be run before
  // generating the final transaction trace. (`model` is the hoisted var from
  // the branch above; it is undefined when there is no parsed model, so the
  // segment falls back to the operation name.)
  segment.name = model || operation

  // Datastore instance metrics — only when the segment knows both the host
  // and the port/path/id it talked to.
  if (segment.parameters.hasOwnProperty('host') &&
      segment.parameters.hasOwnProperty('port_path_or_id')) {
    var instanceName = DB.INSTANCE + '/' + thisTypeSlash + segment.parameters.host +
      '/' + segment.parameters.port_path_or_id
    transaction.measure(instanceName, null, duration, exclusive)
  }

  // Hand the raw query off to the slow-query tracer (only set for string
  // queries — see the constructor).
  if (this.raw) {
    transaction.agent.queries.addQuery(
      segment,
      this.type.toLowerCase(),
      this.raw,
      this.trace
    )
  }
}
module.exports = ParsedStatement
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 | 1 1 4 4 1 1 | 'use strict'
var ParsedStatement = require('./parsed-statement')
/**
 * Pairs an operation name ('select', 'insert', ...) with the pattern used to
 * pull the target table out of a statement of that kind.
 *
 * @param {string} operation        Operation keyword the statement must open with.
 * @param {RegExp} operationPattern Pattern whose group 1 captures the table name.
 */
function StatementMatcher(operation, operationPattern) {
  this.operation = operation
  this.operationPattern = operationPattern
}

/**
 * Attempt to parse `sql` as this matcher's operation.
 *
 * @param {string} type Datastore type, passed through to ParsedStatement.
 * @param {string} sql  Statement text to inspect.
 * @returns {ParsedStatement|undefined} A parsed statement when the query
 *   starts with this operation ('unknown' model when the table pattern finds
 *   nothing), or undefined when it does not match.
 */
StatementMatcher.prototype.getParsedStatement = function getParsedStatement(type, sql) {
  // operationPattern is a shared /g regex; reset its cursor so a previous
  // exec cannot skew this call.
  this.operationPattern.lastIndex = 0

  var startsWithOperation = new RegExp('^\\s*' + this.operation, 'ig').test(sql)
  if (!startsWithOperation) return

  var tableMatch = this.operationPattern.exec(sql)
  return new ParsedStatement(
    type,
    this.operation,
    tableMatch ? tableMatch[1] : 'unknown',
    sql
  )
}
module.exports = StatementMatcher
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../logger').child({component: 'query_tracer'})
var obfuscate = require('../util/sql/obfuscate')
var Stats = require('../stats')
var util = require('util')
var crypto = require('crypto')
var encode = require('../util/codec.js').encode
var path = require('path')
var NR_ROOT = path.resolve(__dirname, '..')
module.exports = QueryTracer
/**
 * Collects slow queries, keyed by normalized query text, up to a configured
 * sample cap.
 *
 * @param {object} config Agent config (transaction_tracer / slow_sql keys).
 */
function QueryTracer(config) {
  // Allow calling without `new`.
  if (!(this instanceof QueryTracer)) {
    return new QueryTracer(config)
  }

  // Map of normalized query text -> QuerySample.
  this.samples = {}
  this.config = config
}

/**
 * Evict the sample whose representative trace has the shortest duration.
 * Only safe to call when at least one sample exists (callers only invoke it
 * after inserting over the cap).
 */
QueryTracer.prototype.removeShortest = function removeShortest() {
  var keys = Object.keys(this.samples)
  var shortest
  for (var i = 0, len = keys.length; i < len; ++i) {
    var sample = this.samples[keys[i]].trace
    if (!shortest || shortest.duration > sample.duration) {
      shortest = sample
    }
  }

  delete this.samples[shortest.normalized]
}

/**
 * Fold another tracer's samples into this one (same-key samples are merged,
 * new keys are adopted). Used to re-absorb samples after a failed harvest.
 *
 * @param {QueryTracer} tracer The tracer whose samples to absorb.
 */
QueryTracer.prototype.merge = function merge(tracer) {
  var keys = Object.keys(tracer.samples)

  for (var i = 0, len = keys.length; i < len; ++i) {
    if (this.samples[keys[i]]) {
      this.samples[keys[i]].merge(tracer.samples[keys[i]])
    } else {
      this.samples[keys[i]] = tracer.samples[keys[i]]
    }
  }
}

/**
 * Record a query against a segment: attach the (raw or obfuscated) SQL and
 * backtrace to the segment's parameters, and — when slow_sql is enabled —
 * aggregate it into the sample map.
 *
 * NOTE: order matters here. The segment parameters are set before the
 * slow_sql.enabled check (trace attributes are recorded even when slow-sql
 * collection is off), and the over-cap eviction runs only after the new
 * sample is inserted, so the new sample itself may be the one evicted.
 *
 * @param {object} segment Datastore segment the query ran under.
 * @param {string} type    Lower-cased datastore type.
 * @param {string} query   Raw query text.
 * @param {?Error} trace   Error captured at call time, for the backtrace.
 */
QueryTracer.prototype.addQuery = function addQuery(segment, type, query, trace) {
  var duration = segment.getDurationInMillis()

  // Fast queries are not interesting; bail before building a SlowQuery.
  if (duration < this.config.transaction_tracer.explain_threshold) return

  var slowQuery = new SlowQuery(segment, type, query, trace)

  switch (this.config.transaction_tracer.record_sql) {
    case 'raw':
      logger.info('recording raw sql')
      segment.parameters.sql = slowQuery.query
      break
    case 'obfuscated':
      logger.info('recording obfuscated sql')
      segment.parameters.sql_obfuscated = slowQuery.obfuscated
      break
    default:
      // Any other setting means SQL collection is off entirely.
      logger.info(
        'not collecting slow-query because transaction_tracer.record_sql was set to %s',
        this.config.transaction_tracer.record_sql
      )
      return
  }

  segment.parameters.backtrace = slowQuery.trace

  if (!this.config.slow_sql.enabled) return

  // Same normalized query seen before: just aggregate into the existing sample.
  if (this.samples[slowQuery.normalized]) {
    return this.samples[slowQuery.normalized].aggregate(slowQuery)
  }

  this.samples[slowQuery.normalized] = new QuerySample(this, slowQuery)

  // Enforce the sample cap after insertion by dropping the shortest sample.
  if (Object.keys(this.samples).length > this.config.slow_sql.max_samples) {
    this.removeShortest()
  }
}

/**
 * Serialize every sample to the collector's wire format, in parallel, calling
 * `done(err, data)` exactly once.
 *
 * @param {Function} done Node-style callback receiving the array of encoded
 *                        sample rows.
 */
QueryTracer.prototype.prepareJSON = function prepareJSON(done) {
  var keys = Object.keys(this.samples)
  var remaining = keys.length
  var data = []

  if (!remaining) return done(null, data)

  for (var i = 0; i < keys.length; ++i) {
    this.samples[keys[i]].prepareJSON(collect)
  }

  // Shared completion callback: first error wins, otherwise fire once the
  // last sample reports in. Row order follows completion order, not key order.
  function collect(err, json) {
    if (err) {
      done(err)
      // turn callback into a noop so it can't be called more than once
      done = noop
      return
    }

    data.push(json)

    if (!--remaining) done(null, data)
  }

  function noop() {}
}
/**
 * Aggregated statistics for one normalized query, extending Stats with a
 * pointer back to the tracer and the slowest individual trace seen.
 *
 * @param {QueryTracer} tracer    Owning tracer (used for config lookups).
 * @param {SlowQuery}   slowQuery First observation of this query.
 */
function QuerySample(tracer, slowQuery) {
  Stats.call(this)
  this.tracer = tracer
  this.trace = slowQuery
  this.aggregate(slowQuery)
}
util.inherits(QuerySample, Stats)

/**
 * Fold another observation into the stats, keeping the slowest observation
 * as this sample's representative trace.
 */
QuerySample.prototype.aggregate = function aggregate(slowQuery) {
  this.recordValue(slowQuery.duration)
  // Keep the existing trace when it is at least as slow as the new one.
  if (this.trace && this.trace.duration >= slowQuery.duration) return
  this.trace = slowQuery
}

/**
 * Merge another QuerySample (same normalized query) into this one, adopting
 * its trace when it is slower.
 */
QuerySample.prototype.merge = function merge(sample) {
  Stats.prototype.merge.call(this, sample)
  if (this.trace.duration < sample.trace.duration) {
    this.trace = sample.trace
  }
}

/**
 * Serialize this sample into the collector's slow-SQL row format, encoding
 * the params blob unless simple_compression is enabled.
 *
 * @param {Function} done Node-style callback receiving the row array.
 */
QuerySample.prototype.prepareJSON = function prepareJSON(done) {
  var transaction = this.trace.segment.transaction
  var sample = this
  var trace = sample.trace

  var params = sample.getParams()
  if (!this.tracer.config.simple_compression) {
    encode(params, respond)
  } else {
    // Keep the call asynchronous either way so callers see consistent timing.
    process.nextTick(respond.bind(null, null, params))
  }

  function respond(err, data) {
    if (err) return done(err)

    done(null, [
      transaction.name,
      transaction.url || '<unknown>',
      trace.id,
      getQuery(sample.tracer.config, trace),
      trace.metric,
      sample.callCount,
      sample.total,
      sample.min,
      sample.max,
      data
    ])
  }
}

/**
 * Build the params blob for the slow-SQL row: the backtrace plus whichever
 * datastore-instance attributes the segment recorded.
 */
QuerySample.prototype.getParams = function getParams() {
  var segmentParams = this.trace.segment.parameters
  var params = {
    backtrace: this.trace.trace,
  }

  if (segmentParams.host) {
    params.host = segmentParams.host
  }

  if (segmentParams.port_path_or_id) {
    params.port_path_or_id = segmentParams.port_path_or_id
  }

  if (segmentParams.database_name) {
    params.database_name = segmentParams.database_name
  }
  return params
}
/**
 * Snapshot of one slow query observation.
 *
 * @param {object} segment Datastore segment the query ran under.
 * @param {string} type    Datastore type (used by the obfuscator).
 * @param {string} query   Raw query text.
 * @param {?Error} trace   Error captured at call time, for the backtrace.
 */
function SlowQuery(segment, type, query, trace) {
  this.obfuscated = obfuscate(query, type)
  // Strip whitespace and placeholder-list commas so textually different but
  // equivalent queries normalize to the same aggregation key.
  this.normalized = this.obfuscated.replace(/\?\s*,\s*|\s*/g, '')
  this.id = normalizedHash(this.normalized)
  this.segment = segment
  this.query = query
  this.metric = segment.name
  this.trace = formatTrace(trace)
  this.duration = segment.getDurationInMillis()
}
/**
 * Derive a small numeric id from a normalized query string: the last 16 bits
 * of its MD5 digest, as an integer in [0, 65535].
 *
 * @param {string} value Normalized query text.
 * @returns {number} Integer id.
 */
function normalizedHash(value) {
  var hexDigest = crypto.createHash('md5').update(value).digest('hex')
  return parseInt(hexDigest.slice(-4), 16)
}
/**
 * Turn a captured Error's stack into a backtrace string: drop the message
 * line and filter out frames that originate inside the agent itself.
 *
 * @param {?Error} trace Error captured at query time, or falsy.
 * @returns {string} Newline-joined external frames; '' when there is no trace.
 */
function formatTrace(trace) {
  if (!trace) return ''
  var frames = trace.stack.split('\n').slice(1)
  return frames.filter(notNR).join('\n')
}
/**
 * Stack-frame filter: true when the frame does not come from inside the
 * agent's own directory tree (NR_ROOT).
 *
 * @param {string} frame One line of a stack trace.
 * @returns {boolean} True for frames originating outside the agent.
 */
function notNR(frame) {
  var isAgentFrame = frame.indexOf(NR_ROOT) !== -1
  return !isAgentFrame
}
/**
 * Select the query text to report for a slow-SQL row, honoring the
 * record_sql setting: raw SQL, obfuscated SQL, or '?' when neither form may
 * be collected.
 *
 * @param {object}    config Agent config (transaction_tracer.record_sql).
 * @param {SlowQuery} trace  Slow query to read the text from.
 * @returns {string} Query text suitable for the configured privacy level.
 */
function getQuery(config, trace) {
  var mode = config.transaction_tracer.record_sql
  if (mode === 'raw') return trace.query
  if (mode === 'obfuscated') return trace.obfuscated
  return '?'
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 | 1 1 | 'use strict' module.exports.extractDatabaseChangeFromUse = extractDatabaseChangeFromUse function extractDatabaseChangeFromUse(sql) { // The character ranges for this were pulled from // http://dev.mysql.com/doc/refman/5.7/en/identifiers.html var match = /^\s*use[^\w`]+([\w$_\u0080-\uFFFF]+|`[^`]+`)[\s;]*$/i.exec(sql) return match && match[1] || null } |
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| aggregator.js | 19.38% | (31 / 160) | 0% | (0 / 84) | 0% | (0 / 18) | 20.67% | (31 / 150) | |
| index.js | 10.53% | (8 / 76) | 0% | (0 / 50) | 0% | (0 / 3) | 10.81% | (8 / 74) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var copy = require('../util/copy')
var urltils = require('../util/urltils')
var logger = require('../logger').child({component: 'error_tracer'})
var NAMES = require('../metrics/names')
var errorsModule = require('./index')
var Reservoir = require('../reservoir.js')
var WeakSet = global.WeakSet
var createError = errorsModule.createError
var createEvent = errorsModule.createEvent
module.exports = ErrorAggregator
/*
 *
 * CONSTANTS
 *
 */
// Hard cap on the number of error traces buffered per harvest cycle. Errors
// beyond the cap are still counted and still produce events, but their
// traces are dropped (see _collect).
var MAX_ERRORS = 20
/**
 * ErrorAggregator is responsible for collecting JS errors and errored-out HTTP
 * transactions, and for converting them to error traces and error events
 * expected by the collector.
 *
 * @param {object} config Agent configuration (error_collector settings).
 */
function ErrorAggregator(config) {
  this.config = config

  // Running totals, split by transaction kind.
  this.errorCount = 0
  this.webTransactionErrorCount = 0
  this.otherTransactionErrorCount = 0

  // Harvest buffer of formatted error traces (capped at MAX_ERRORS).
  this.errors = []

  // Per-transaction bookkeeping used to de-duplicate seen exceptions.
  this.seenObjectsByTransaction = {}
  this.seenStringsByTransaction = {}

  // reservoir used for error events
  this.events = new Reservoir(this.config.error_collector.max_event_samples_stored)
}
/**
 * Every finished transaction goes through this handler, so do as
 * little as possible.
 *
 * Collects the transaction's user errors (always), its other exceptions
 * (unless the status code is in the ignore list), or the errored status
 * itself, then bumps the per-transaction error metric for whatever was
 * actually collected.
 *
 * @param {Transaction} transaction The transaction that just finished.
 * @param {object}      metrics     Metrics collection to count errors into.
 */
ErrorAggregator.prototype.onTransactionFinished = onTransactionFinished
function onTransactionFinished(transaction, metrics) {
  if (!transaction) throw new Error("Error collector got a blank transaction.")
  if (!metrics) throw new Error("Error collector requires metrics to count errors.")
  if (transaction.ignore) return

  var collected = 0
  var entry
  var i

  // collect user errors even if status code is ignored
  for (i = 0; i < transaction.userErrors.length; i++) {
    entry = transaction.userErrors[i]
    if (this._collect(transaction, entry[0], entry[1], entry[2])) {
      collected++
    }
  }

  var hasExceptions = transaction.exceptions.length > 0
  var isErroredTransaction = urltils.isError(this.config, transaction.statusCode)
  var isIgnoredErrorStatusCode = urltils.isIgnoredError(this.config,
    transaction.statusCode)

  if (hasExceptions && !isIgnoredErrorStatusCode) {
    // collect other exceptions only if status code is not ignored
    for (i = 0; i < transaction.exceptions.length; i++) {
      entry = transaction.exceptions[i]
      if (this._collect(transaction, entry[0], entry[1], entry[2])) {
        collected++
      }
    }
  } else if (isErroredTransaction) {
    // No explicit exceptions, but the response status itself is an error.
    if (this._collect(transaction)) collected++
  }

  // the metric should be incremented only if the error was actually collected
  if (collected > 0) {
    metrics.getOrCreateMetric(NAMES.ERRORS.PREFIX + transaction.name)
      .incrementCallCount(collected)
  }
}
/**
 * This function collects the error right away when transaction is not
 * supplied. Otherwise it delays collecting the error until the transaction
 * ends.
 *
 * NOTE: this interface is unofficial and may change in future.
 *
 * @param {Transaction} transaction Transaction associated with the error
 *                                  (optional).
 * @param {Error} exception The error to be traced.
 * @param {object} customParameters Any custom parameters associated with
 *                                  the request (optional).
 */
ErrorAggregator.prototype.add = function add(transaction, exception, customParameters) {
  if (!exception) return

  var timestamp = Date.now()

  // With a transaction, defer collection until the transaction finishes;
  // without one, collect immediately.
  if (transaction) {
    transaction.addException(exception, customParameters, timestamp)
    return
  }
  this._collect(transaction, exception, customParameters, timestamp)
}
/**
 * This function is used to collect errors specifically added using the
 * noticeError() API. Similarly to add(), it collects the error right away
 * when transaction is not supplied; otherwise it delays collecting the error
 * until the transaction ends. The reason for separating the API errors from
 * other exceptions is that different ignore rules apply to them.
 *
 * NOTE: this interface is unofficial and may change in future.
 *
 * @param {Transaction} transaction Transaction associated with the error
 *                                  (optional).
 * @param {Error} exception The error to be traced.
 * @param {object} customParameters Any custom parameters associated with
 *                                  the request (optional).
 */
ErrorAggregator.prototype.addUserError = function addUserError(transaction, exception,
    customParameters) {
  if (!exception) return

  var timestamp = Date.now()

  // Mirror add(): defer to transaction end when possible, else collect now.
  if (transaction) {
    transaction.addUserError(exception, customParameters, timestamp)
    return
  }
  this._collect(transaction, exception, customParameters, timestamp)
}
/**
 * Determine whether this aggregator has already seen the given exception for
 * the given transaction. Mutates the bookkeeping structures to record the
 * exception as seen.
 *
 * Object exceptions are tracked in a WeakSet per transaction (or a plain
 * array on runtimes without WeakSet); everything else (usually strings) is
 * tracked by value in a map.
 *
 * @param {?Transaction} transaction Transaction the error belongs to; errors
 *   with no transaction share the 'Unknown' bucket.
 * @param {Error|string} exception The error to be checked.
 * @returns {boolean} True when the exception was already seen.
 */
ErrorAggregator.prototype.haveSeen = function haveSeen(transaction, exception) {
  var txnId = transaction ? transaction.id : 'Unknown'

  if (typeof exception === 'object') {
    var seenObjects = this.seenObjectsByTransaction[txnId]
    if (!seenObjects) {
      seenObjects = this.seenObjectsByTransaction[txnId] =
        (WeakSet ? new WeakSet() : [])
    }

    if (WeakSet) {
      if (seenObjects.has(exception)) return true
      seenObjects.add(exception)
    } else {
      if (seenObjects.indexOf(exception) !== -1) return true
      seenObjects.push(exception)
    }
    return false
  }

  // typeof exception !== 'object' — track by value.
  var seenStrings = this.seenStringsByTransaction[txnId]
  if (!seenStrings) {
    seenStrings = this.seenStringsByTransaction[txnId] = {}
  }
  if (seenStrings[exception]) return true
  seenStrings[exception] = true

  return false
}
/**
 * Collects the error and also creates the error event.
 * This function uses an array of seen exceptions to ensure errors don't get
 * double-counted. It can also be used as an unofficial means of marking that
 * user errors shouldn't be traced.
 *
 * For an error to be traced, at least one of the transaction or the error
 * must be present.
 *
 * NOTE: this interface is unofficial and may change in future.
 *
 * @param {Transaction} transaction Transaction associated with the error
 *                                  (optional).
 * @param {Error} exception The error to be traced (optional).
 * @param {object} customParameters Any custom parameters associated with
 *                                  the request (optional).
 * @param {number} timestamp Epoch millis when the error was noticed.
 * @returns {bool} True if the error was collected.
 */
ErrorAggregator.prototype._collect = _collect
function _collect(transaction, exception, customParameters, timestamp) {
  if (exception) {
    // De-duplicate (haveSeen also marks the exception as seen).
    if (this.haveSeen(transaction, exception)) {
      return
    }
    // Things that are neither strings nor Error-shaped can't be traced;
    // demote them to "no exception" and rely on the transaction status.
    if (typeof exception !== 'string' && !exception.message && !exception.stack) {
      logger.trace(exception,
        "Got error that is not an instance of Error or string.")
      exception = null
    }
  }

  // With no usable exception we only collect when the transaction itself
  // errored (and hasn't already recorded an error).
  if (!exception) {
    if (!transaction) return
    if (!transaction.statusCode) return
    if (transaction.error) return
  }

  // Counters are bumped BEFORE the enabled-check below: errors are counted
  // even when the error collector is disabled at runtime.
  this.errorCount++

  if (transaction) {
    if (transaction.isWeb()) {
      this.webTransactionErrorCount++
    } else {
      this.otherTransactionErrorCount++
    }
  }

  // allow enabling & disabling the error tracer at runtime
  // TODO: it would be better to check config in the public add() to prevents collecting
  // errors on the transaction unnecessarily
  if (!this.config.collect_errors ||
      !this.config.error_collector || !this.config.error_collector.enabled) return

  if (exception) {
    logger.trace(exception, "Got exception to trace:")
  }

  var error = createError(transaction, exception, customParameters, this.config)

  if (this.errors.length < MAX_ERRORS) {
    logger.debug({error: error}, "Error to be sent to collector:")

    // XXX: 2016-05-24 Remove this when APM UI is updated to use correct request_uri
    //
    // For right now, when this flag is enabled, the request_uri will be added
    // to the error data. This will result in duplicated data being displayed on
    // APM which is a no-go, so we need to remove it here. However, we want the
    // data to still be there for error events metrics, so we need to perform a
    // deep copy and only remove it from this data.
    //
    // In order to save cycles, we perform a smart deep copy in the form of a
    // series of shallow copies down just the path that needs to change.
    if (this.config.feature_flag.send_request_uri_attribute) {
      var err = []
      err.push.apply(err, error)
      err[4] = copy.shallow(err[4])
      err[4].agentAttributes = copy.shallow(err[4].agentAttributes)
      delete err[4].agentAttributes.request_uri
      this.errors.push(err)
    } else {
      this.errors.push(error)
    }
  } else {
    logger.debug("Already have %d errors to send to collector, not keeping.",
      MAX_ERRORS)
  }

  // add error event — events are still created even when the trace buffer is
  // full.
  if (this.config.error_collector.capture_events === true) {
    this.events.add(createEvent(transaction, error, timestamp))
  }

  return true
}
/**
* Returns collected errors.
*/
ErrorAggregator.prototype.getErrors = function getErrors() {
return this.errors
}
/**
* Returns error events based on seen errors.
*/
ErrorAggregator.prototype.getEvents = function getEvents() {
return this.events.toArray()
}
/**
* Returns maximum number of events that are collected per a harvest cycle.
*/
ErrorAggregator.prototype.getEventsLimit = function getEventsLimit() {
return this.events.limit
}
/**
* Returns number of events that have been seen since the last harvest cycle.
*/
ErrorAggregator.prototype.getEventsSeen = function getEventsSeen() {
return this.events.seen
}
/**
* Returns total number of collected errors.
*/
ErrorAggregator.prototype.getTotalErrorCount = function getTotalErrorCount() {
return this.errorCount
}
/**
 * Accessor for the number of errors recorded during web transactions.
 *
 * @returns {number} Web-transaction error count.
 */
ErrorAggregator.prototype.getWebTransactionsErrorCount =
  function getWebTransactionsErrorCount() {
    var webErrors = this.webTransactionErrorCount
    return webErrors
  }
/**
 * Accessor for the number of errors recorded during background (non-web)
 * transactions. NOTE: the inner function name intentionally mirrors the
 * backing `otherTransactionErrorCount` property.
 *
 * @returns {number} Background-transaction error count.
 */
ErrorAggregator.prototype.getBackgroundTransactionsErrorCount =
  function getOtherTransactionsErrorCount() {
    var backgroundErrors = this.otherTransactionErrorCount
    return backgroundErrors
  }
/**
 * If the connection to the collector fails, retain as many previously
 * harvested errors as will fit without overflowing the current error list.
 *
 * Fix: clamp the remaining capacity at zero. Previously, when the list was
 * already at (or somehow over) MAX_ERRORS, `len` went negative and the warn
 * log reported a nonsensical negative count (the loop itself never ran, so
 * this was a logging defect only).
 *
 * @param array errors Previously harvested errors.
 */
ErrorAggregator.prototype.merge = function merge(errors) {
  if (!errors) return
  var capacity = Math.max(0, MAX_ERRORS - this.errors.length)
  var len = Math.min(errors.length, capacity)
  logger.warn("Merging %s (of %s) errors for next delivery.", len, errors.length)
  for (var i = 0; i < len; i++) this.errors.push(errors[i])
}
/**
 * Folds previously harvested error events back into the live reservoir.
 *
 * @param {Array} events Events to re-sample into this aggregator.
 */
ErrorAggregator.prototype.mergeEvents = function mergeEvents(events) {
  var reservoir = this.events
  reservoir.merge(events)
}
/**
 * Discards all sampled error events by replacing the reservoir, sized from
 * the current error_collector configuration.
 */
ErrorAggregator.prototype.clearEvents = function clearEvents() {
  var limit = this.config.error_collector.max_event_samples_stored
  this.events = new Reservoir(limit)
}
/**
 * Resets the error traces, per-transaction dedup caches, and all error
 * counters back to their initial empty state.
 */
ErrorAggregator.prototype.clearErrors = function clearErrors() {
  this.errors = []
  this.seenStringsByTransaction = {}
  this.seenObjectsByTransaction = {}
  // zero all three counters in one chained assignment
  this.errorCount = this.webTransactionErrorCount =
    this.otherTransactionErrorCount = 0
}
/**
 * Swaps in a new agent configuration and resizes the event reservoir to the
 * new error_collector sample limit.
 *
 * @param {object} config The updated agent configuration.
 */
ErrorAggregator.prototype.reconfigure = function reconfigure(config) {
  this.config = config
  var limit = config.error_collector.max_event_samples_stored
  this.events.setLimit(limit)
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 | 1 1 1 1 1 1 1 1 | 'use strict'
var urltils = require('../util/urltils')
var NAMES = require('../metrics/names')
var util = require('util')
module.exports.createError = createError
module.exports.createEvent = createEvent
/**
 * Given either or both of a transaction and an exception, generate an error
 * trace in the JSON format expected by the collector. Used both by the HTTP
 * instrumentation (which flags transactions in error via status codes) and
 * by error listeners trapping actual Error instances, so every field gets a
 * sensible default.
 *
 * @param {Transaction} transaction The agent transaction, presumably
 * coming out of the instrumentation.
 * @param {Error} exception Something trapped by an error listener.
 * @param {object} customParameters Any custom parameters associated with
 * the request (optional).
 * @param {object} config The agent configuration.
 * @returns {Array} Collector-format error trace:
 * [0, name, message, type, params].
 */
function createError(transaction, exception, customParameters, config) {
  var errorName = 'Unknown'
  var message = ''
  var type = 'Error'
  var params = {
    request_uri: '',
    userAttributes: {},
    agentAttributes: {},
    intrinsics: {}
  }

  if (typeof exception === 'string') {
    // Throwing bare strings is a common pattern, though it gives us far less
    // information (no class, no stack).
    message = exception
  } else if (exception && typeof exception === 'object' && exception.message) {
    message = exception.message
    if (exception.name) {
      type = exception.name
    } else if (exception.constructor && exception.constructor.name) {
      type = exception.constructor.name
    }
  } else if (transaction && transaction.statusCode &&
             urltils.isError(config, transaction.statusCode)) {
    message = 'HttpError ' + transaction.statusCode
  }

  if (transaction) {
    // getName() is expensive (runs normalizers and ignore rules when no name
    // is assigned yet) and can mutate the transaction's url/ignore state, so
    // it is called exactly once.
    var transactionName = transaction.getName()
    if (transactionName) {
      errorName = transactionName
    }
    if (transaction.isWeb()) {
      params.request_uri = transaction.getScrubbedUrl()
    }
    // Copy all of the parameters off of the transaction.
    params.agentAttributes = transaction.trace.parameters
    params.intrinsics = transaction.getIntrinsicAttributes()
    // Custom params bypass capture_params/ignore_params filtering; only high
    // security mode suppresses them.
    if (!config.high_security) {
      urltils.overwriteParameters(config, transaction.trace.custom, params.userAttributes)
    }
  }

  // Strips ignored params / skips custom params when capture is disabled.
  if (!config.high_security && customParameters) {
    urltils.overwriteParameters(config, customParameters, params.userAttributes)
  }

  var stack = exception && exception.stack
  if (stack) {
    params.stack_trace = ('' + stack).split(/[\n\r]/g)
  }

  var trace = [0, errorName, message, type, params]
  if (transaction) {
    // Tag the trace (non-enumerably) with its transaction id so downstream
    // code can correlate without the property leaking to the collector.
    Object.defineProperty(trace, 'transaction', {
      value: transaction.id
    })
  }
  return trace
}
/**
 * Creates the error event structure sent to the collector:
 * [intrinsics, userAttributes, agentAttributes].
 *
 * @param {Transaction} transaction Transaction the error occurred in (may be
 * null for out-of-transaction errors).
 * @param {Array} error Output of createError() for the exception.
 * @param {number} timestamp Time of the error in milliseconds.
 * @returns {Array} Three-element error event payload.
 */
function createEvent(transaction, error, timestamp) {
  var message = error[2]
  var errorClass = error[3]
  var paramsFromError = error[4]
  var intrinsicAttributes = _getErrorEventIntrinsicAttrs(transaction, errorClass, message,
    timestamp)
  // The error structure created by createError() already performs filtering
  // of custom and agent attributes, so a plain shallow copy suffices.
  // util._extend is deprecated (Node DEP0060); copy own enumerable
  // properties by hand so this keeps working on every supported Node.
  var userAttributes = _copyOwn(paramsFromError.userAttributes)
  var agentAttributes = _copyOwn(paramsFromError.agentAttributes)
  var errorEvent = [
    intrinsicAttributes,
    userAttributes,
    agentAttributes
  ]
  return errorEvent
}

/**
 * Shallow-copies the own enumerable properties of src onto a fresh object.
 * Drop-in replacement for the deprecated util._extend({}, src); iterating a
 * null/undefined src is a harmless no-op.
 */
function _copyOwn(src) {
  var dest = {}
  for (var key in src) {
    if (Object.prototype.hasOwnProperty.call(src, key)) dest[key] = src[key]
  }
  return dest
}
/**
 * Assembles the intrinsic attributes for an error event: the error class and
 * message, the timestamp (converted to seconds), and — when a transaction is
 * available — its name, duration, queue/external/database metrics, synthetics
 * identifiers, and correlation GUIDs.
 *
 * @param {Transaction} transaction Source transaction, or null/undefined.
 * @param {string} errorClass Error type name (e.g. 'TypeError').
 * @param {string} message Error message.
 * @param {number} timestamp Error time in milliseconds.
 * @returns {object} Intrinsic attribute map for the event payload.
 */
function _getErrorEventIntrinsicAttrs(transaction, errorClass, message, timestamp) {
  // The collector expects seconds; callers hand us milliseconds.
  if (timestamp) timestamp = timestamp / 1000
  var intrinsics = {
    type: "TransactionError",
    "error.class": errorClass,
    "error.message": message,
    timestamp: timestamp
  }

  if (!transaction) {
    intrinsics.transactionName = 'Unknown'
    return intrinsics
  }

  intrinsics.transactionName = transaction.name
  intrinsics.duration = transaction.timer.getDurationInMillis() / 1000

  var queueMetric = transaction.metrics.getMetric(NAMES.QUEUETIME)
  if (queueMetric) {
    intrinsics.queueDuration = queueMetric.total
  }

  var externalMetric = transaction.metrics.getMetric(NAMES.EXTERNAL.ALL)
  if (externalMetric) {
    intrinsics.externalDuration = externalMetric.total
    intrinsics.externalCallCount = externalMetric.callCount
  }

  var dbMetric = transaction.metrics.getMetric(NAMES.DB.ALL)
  if (dbMetric) {
    intrinsics.databaseDuration = dbMetric.total
    intrinsics.databaseCallCount = dbMetric.callCount
  }

  var synthetics = transaction.syntheticsData
  if (synthetics) {
    intrinsics["nr.syntheticsResourceId"] = synthetics.resourceId
    intrinsics["nr.syntheticsJobId"] = synthetics.jobId
    intrinsics["nr.syntheticsMonitorId"] = synthetics.monitorId
  }

  intrinsics['nr.transactionGuid'] = transaction.id
  intrinsics['nr.referringTransactionGuid'] = transaction.referringTransactionGuid
  if (transaction.port) {
    intrinsics.port = transaction.port
  }
  return intrinsics
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| bluebird.js | 75% | (3 / 4) | 100% | (0 / 0) | 0% | (0 / 1) | 75% | (3 / 4) | |
| cassandra-driver.js | 21.62% | (8 / 37) | 0% | (0 / 17) | 0% | (0 / 5) | 23.53% | (8 / 34) | |
| connect.js | 11.76% | (8 / 68) | 0% | (0 / 39) | 0% | (0 / 8) | 12.5% | (8 / 64) | |
| director.js | 9.68% | (6 / 62) | 0% | (0 / 26) | 0% | (0 / 10) | 10.17% | (6 / 59) | |
| express.js | 9.38% | (30 / 320) | 0% | (0 / 223) | 0% | (0 / 31) | 9.46% | (30 / 317) | |
| generic-pool.js | 10.53% | (2 / 19) | 0% | (0 / 5) | 0% | (0 / 7) | 10.53% | (2 / 19) | |
| hapi.js | 13.84% | (22 / 159) | 0% | (0 / 88) | 0% | (0 / 31) | 14.77% | (22 / 149) | |
| ioredis.js | 35% | (7 / 20) | 0% | (0 / 8) | 0% | (0 / 4) | 35% | (7 / 20) | |
| memcached.js | 24.24% | (8 / 33) | 0% | (0 / 17) | 0% | (0 / 6) | 24.24% | (8 / 33) | |
| mongodb.js | 17.68% | (29 / 164) | 0% | (0 / 82) | 0% | (0 / 26) | 17.79% | (29 / 163) | |
| mysql.js | 9.02% | (12 / 133) | 0% | (0 / 72) | 0% | (0 / 22) | 9.02% | (12 / 133) | |
| node-cassandra-cql.js | 30% | (6 / 20) | 0% | (0 / 9) | 0% | (0 / 4) | 30% | (6 / 20) | |
| oracle.js | 23.53% | (16 / 68) | 0% | (0 / 4) | 0% | (0 / 20) | 24.24% | (16 / 66) | |
| pg.js | 17.91% | (24 / 134) | 0% | (0 / 58) | 0% | (0 / 28) | 18.05% | (24 / 133) | |
| promise.js | 12.99% | (23 / 177) | 0% | (0 / 119) | 0% | (0 / 30) | 12.99% | (23 / 177) | |
| q.js | 36.36% | (4 / 11) | 0% | (0 / 4) | 0% | (0 / 2) | 36.36% | (4 / 11) | |
| redis.js | 19.3% | (11 / 57) | 0% | (0 / 50) | 0% | (0 / 8) | 19.3% | (11 / 57) | |
| restify.js | 16.22% | (6 / 37) | 0% | (0 / 18) | 0% | (0 / 7) | 18.18% | (6 / 33) | |
| when.js | 80% | (4 / 5) | 100% | (0 / 0) | 0% | (0 / 1) | 80% | (4 / 5) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 | 1 1 1 | 'use strict'
var promInit = require('./promise')
// Specification handed to the shared promise instrumentation describing which
// bluebird methods to wrap on the prototype and on the Promise constructor.
var BLUEBIRD_SPEC = {
  name: 'bluebird',
  constructor: 'Promise',
  $proto: {
    then: ['then', 'done', 'spread', 'all', 'asCallback', 'nodeify', 'finally', 'lastly'],
    catch: ['catch', 'caught', 'error'],
    // _resolveFromResolver is in bluebird 2.x
    // _execute is in bluebird 3.x
    executor: ['_execute', '_resolveFromResolver']
  },
  $static: {
    // 'fulfilled' is bluebird's (deprecated) alias for Promise.resolve. It was
    // previously misspelled 'fullfilled', which matches no bluebird method, so
    // that alias was never instrumented.
    cast: [
      'resolve', 'fulfilled', 'cast', 'reject', 'rejected', 'fromNode',
      'fromCallback', 'all'
    ]
  }
}
/**
 * Wires the generic promise instrumentation up to the bluebird library
 * using the spec above.
 *
 * @param {Agent} agent The running New Relic agent.
 * @param {Function} bluebird The bluebird Promise constructor/module.
 */
module.exports = initialize

function initialize(agent, bluebird) {
  promInit(agent, bluebird, BLUEBIRD_SPEC)
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 | 1 1 1 1 1 1 1 1 | 'use strict'
var shimmer = require('../shimmer')
var CASSANDRA = require('../metrics/names').CASSANDRA
var parseSql = require('../db/parse-sql')
module.exports = function initialize(agent, cassandra) {
var tracer = agent.tracer
var proto = cassandra.Client.prototype
shimmer.wrapMethod(proto, 'Cassandra.Client.prototype', ['_innerExecute'], wrapExec)
shimmer.wrapMethod(proto, 'Cassandra.Client.prototype', ['batch'], wrapBatch)
shimmer.wrapMethod(
proto,
'Cassandra.Client.prototype',
['connect'],
tracer.wrapFunctionLast.bind(tracer, CASSANDRA.OPERATION + '/connect', null)
)
shimmer.wrapMethod(
proto,
'Cassandra.Client.prototype',
['shutdown'],
tracer.wrapFunctionLast.bind(tracer, CASSANDRA.OPERATION + '/shutdown', null)
)
function wrapExec(original) {
return tracer.wrapFunction(
CASSANDRA.STATEMENT + 'Unknown',
null,
original,
wrappedExec
)
function wrappedExec(segment, args, bind) {
var ps = parseSql(CASSANDRA.PREFIX, args[0])
var model = (ps.model || 'unknown')
if (this.keyspace && model.indexOf('.') === -1) model = this.keyspace + '.' + model
segment.name = CASSANDRA.STATEMENT + model + '/' + ps.operation
segment.transaction.addRecorder(ps.recordMetrics.bind(ps, segment))
var last = args.length - 1
args[last] = bind(args[last])
return args
}
}
function wrapBatch(original) {
return tracer.wrapFunction(
CASSANDRA.STATEMENT + 'Unknown',
null,
original,
wrappedBatch
)
function wrappedBatch(segment, args, bind) {
var sql = (args[0] && args[0][0]) || ''
if (sql.query) sql = sql.query
var ps = parseSql(CASSANDRA.PREFIX, sql)
var model = (ps.model || 'unknown')
if (this.keyspace && model.indexOf('.') === -1) model = this.keyspace + '.' + model
segment.name = CASSANDRA.STATEMENT + model + '/' + ps.operation + '/batch'
segment.transaction.addRecorder(ps.recordMetrics.bind(ps, segment))
var last = args.length - 1
args[last] = bind(args[last])
return args
}
}
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 | 1 1 1 1 1 1 1 1 | 'use strict'
var shimmer = require('../shimmer')
var logger = require('../logger').child({component: 'connect'})
/*
*
* CONSTANTS
*
*/
var ORIGINAL = '__NR_original'
var RESERVED = [ // http://es5.github.io/#x7.6.1.2
// always (how would these even get here?)
'class', 'enum', 'extends', 'super', 'const', 'export', 'import',
// strict
'implements', 'let', 'private', 'public', 'yield', 'interface',
'package', 'protected', 'static'
]
/**
* ES5 strict mode disallows some identifiers that are allowed in non-strict
* code. Mangle function names that are on that list of keywords so they're
* non-objectionable in strict mode (which is currently enabled everywhere
* inside the agent, as well as at many customer sites).
*
* If you really need to crawl your Express apps middleware stack, change
* your test to use name.indexOf('whatever') === 0 as the predicate instead
* of name === 'whatever'. It's a little slower, but you shouldn't be doing
* that anyway.
*
* @param {string} name The candidate function name
*
* @returns {string} A safe (potentially mangled) function name.
*/
function mangle(name) {
var parts = name.split(' ')
name = parts[parts.length - 1]
if (RESERVED.indexOf(name) !== -1) return name + '_'
return name
}
/**
 * Instruments Connect's middleware mounting so errors raised in or passed
 * through the middleware chain are reported to the agent's error tracer,
 * and so callbacks run inside the transaction's tracer context.
 *
 * @param {Agent} agent The running New Relic agent.
 * @param {object} connect The Connect module being instrumented.
 */
module.exports = function initialize(agent, connect) {
  var tracer = agent.tracer
  // Error-trapping middleware injected into the app's stack: records any
  // error handed to it on the current transaction, then propagates the
  // error so user-installed error handlers still run.
  var interceptor = {
    route: '',
    handle: function sentinel(error, req, res, next) {
      if (error) {
        var transaction = agent.tracer.getTransaction()
        agent.errors.add(transaction, error)
      }
      return next(error)
    }
  }
  /**
   * Problem:
   *
   * 1. Connect determines whether middleware functions are error handlers by
   *    testing their arity. Not cool.
   * 2. Downstream Express users rely upon being able to iterate over their
   *    middleware stack to find specific middleware functions. Sorta less
   *    uncool, but still a pain.
   *
   * Solution:
   *
   * Use eval. This once. For this one specific purpose. Not anywhere else for
   * any reason.
   */
  function wrapHandle(__NR_handle) {
    // jshint -W061
    var arglist
    var name = ''
    // reiterated: testing function arity is stupid, but the wrapper must
    // present the SAME arity as the original so Connect classifies it the
    // same way (regular middleware vs. 4-arg error handler)
    switch (__NR_handle.length) {
      case 2:
        arglist = '(req, res)'
        break
      case 3:
        arglist = '(req, res, next)'
        break
      // don't break other error handlers
      case 4:
        arglist = '(err, req, res, next)'
        break
      default:
        arglist = '()'
    }
    if (__NR_handle.name) name = mangle(__NR_handle.name)
    // leave this function anonymous
    // it's connect madness
    /* eslint-disable func-names */
    // Template body: bind the trailing callback (if any) into the tracer
    // context, then delegate to the original handler.
    var template = function() {
      var args = tracer.slice(arguments)
      var last = args.length - 1
      if (typeof args[last] === 'function') {
        args[last] = tracer.bindFunction(args[last])
      }
      __NR_handle.apply(this, args)
    }
    /* eslint-enable func-names */
    // I am a bad person and this makes me feel bad.
    // We use eval because we need to insert the function with a specific name
    // (and a specific arity) to allow for lookup. substring(11) strips the
    // "function ()" prefix from the template's source so only the body is
    // spliced into the generated wrapper.
    /* eslint-disable no-eval */
    var wrapped = eval(
      '(function(){return function ' + name + arglist +
      template.toString().substring(11) + '}())'
    )
    /* eslint-enable no-eval */
    wrapped[ORIGINAL] = __NR_handle
    return wrapped
  }
  // Wraps app.use so each newly-mounted handler gets wrapped exactly once
  // and the error interceptor is (re)positioned in the stack.
  function wrapUse(use) {
    return function cls_wrapUse() {
      if (!this.stack) return use.apply(this, arguments)
      // Remove any previously-inserted interceptor; it is re-inserted at the
      // correct position below.
      this.stack = this.stack.filter(function cb_filter(m) {
        return m !== interceptor
      })
      /* We allow `use` to go through the arguments so it can reject bad things
       * for us so we don't have to also do argument type checking.
       */
      var app = use.apply(this, arguments)
      // wrap most recently added unwrapped handler
      var top = this.stack.pop()
      if (top) {
        if (top.handle &&
            typeof top.handle === 'function' &&
            !top.handle[ORIGINAL]) {
          top.handle = wrapHandle(top.handle)
        }
        this.stack.push(top)
      }
      /* Give the error tracer a better chance of intercepting errors by
       * putting it before the first error handler (a middleware that takes 4
       * parameters, in Connects world). Error handlers tend to be placed
       * towards the end of the middleware chain and sometimes don't pass
       * errors along. Don't just put the interceptor at the beginning because
       * we want to allow as many middleware functions to execute as possible
       * before the interceptor is run, to increase error coverage.
       *
       * NOTE: This is heuristic, and works because interceptor propagates
       *       errors instead of terminating the middleware chain.
       *       Ignores routes.
       */
      var spliced = false
      for (var i = 0; i < this.stack.length; i++) {
        var middleware = this.stack[i]
        // Check to see if it is an error handler middleware
        if (middleware &&
            middleware.handle &&
            middleware.handle.length === 4) {
          this.stack.splice(i, 0, interceptor)
          spliced = true
          break
        }
      }
      if (!spliced) this.stack.push(interceptor)
      // don't break chaining
      return app
    }
  }
  /**
   * Connect 1 and 2 are very different animals, but like Express, it mostly
   * comes down to factoring: the `use` method lives on a different object in
   * each major version.
   */
  var version = connect && connect.version && connect.version[0]
  switch (version) {
    case '1':
      shimmer.wrapMethod(connect && connect.HTTPServer && connect.HTTPServer.prototype,
        'connect.HTTPServer.prototype',
        'use',
        wrapUse)
      break
    case '2':
      shimmer.wrapMethod(connect && connect.proto,
        'connect.proto',
        'use',
        wrapUse)
      break
    default:
      logger.debug("Unrecognized version %s of Connect detected; not instrumenting.",
        version)
  }
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 | 1 1 1 1 1 1 | 'use strict'
var shimmer = require('../shimmer')
var logger = require('../logger.js').child({component: 'director'})
var NAMES = require('../metrics/names.js')
/**
 * Updates the transaction's name state to reflect the Director route that
 * just matched. Logs an error and bails when there is no segment or no
 * partial name, and does nothing once the response has finished.
 *
 * @param {TraceSegment} segment Current tracer segment (carries the transaction).
 * @param {string} partialName Route path fragment to append.
 * @param {object} res The HTTP response being directed.
 */
function nameTransaction(segment, partialName, res) {
  if (!segment) return logger.error("No New Relic context to set Director route name on.")
  if (!partialName) return logger.error("No partialName to use for naming.")
  // No need to update the transaction name once the response has ended.
  if (res.finished) return

  var txn = segment.transaction
  var state = txn.nameState

  // After the first route has run, replace the latest path element rather
  // than appending another one.
  if (res.__NR_directored) {
    state.pathStack.pop()
  }
  state.appendPath(partialName)
  state.setVerb(txn.verb)
  state.setDelimiter(NAMES.ACTION_DELIMITER)
  res.__NR_directored = true
}
/**
 * Instruments the Director router: wraps Router#mount and Router#on/route so
 * each route handler runs in its own tracer segment and names the
 * transaction after the matched route path.
 *
 * @param {Agent} agent The running New Relic agent.
 * @param {object} director The director module being instrumented.
 */
module.exports = function initialize(agent, director) {
  var tracer = agent.tracer
  shimmer.wrapMethod(
    director.Router.prototype,
    'director.Router.prototype',
    'mount',
    function wrapMount(mount) {
      return function wrappedMount(routes, path) {
        if (!routes) {
          return mount.call(this, routes, path)
        }
        // Wrap the handler attached for each HTTP method present on this
        // routes object before handing the table to the original mount.
        Object.keys(director.http.methods).forEach(function wrapMethod(methodKey) {
          var method = director.http.methods[methodKey]
          if (routes[method]) { // method exists as attribute
            var route = routes[method] // wrapping associated cb function
            routes[method] = createWrapped(method, path, route)
          }
        })
        function createWrapped(method, path, route) {
          // Avoid double-wrapping if mount sees the same handler twice.
          if (route.__NR_original) {
            route = route.__NR_original
          }
          var wrapped = function wrappedRoute() {
            var transaction = tracer.getTransaction()
            if (!transaction) {
              // Outside a transaction there is nothing to name or record.
              return route.apply(this, arguments)
            }
            var response = this.res // hang directored attr, and check if res is finished
            var pathName = path.join('/')
            var partialName = pathName
            var segment = tracer.createSegment('Function/' + (route.name || "anonymous"))
            nameTransaction(tracer.segment, partialName, response)
            return tracer.bindFunction(route, segment, true).apply(this, arguments)
          }
          wrapped.__NR_original = route
          return wrapped
        }
        return mount.call(this, routes, path)
      }
    }
  )
  shimmer.wrapMethod(
    director.Router.prototype,
    'director.Router.prototype',
    ['on', 'route'],
    function wrapOn(on) {
      return function wrappedOn(method, path, route) {
        // if we are handed unexpected argument types pass them through
        // and let director handle the error case
        if (!route) {
          return on.call(this, method, path, route)
        }
        // Route name is the router's current scope plus this path.
        var partialName = this.scope.join('/') + path
        if (route.__NR_original) {
          route = route.__NR_original
        }
        var wrapped = function wrappedRoute() {
          var transaction = tracer.getTransaction()
          if (!transaction) {
            return route.apply(this, arguments)
          }
          var response = this.res // hang directored attr, and check if res is finished
          var segment = tracer.createSegment('Function/' + (route.name || "anonymous"))
          nameTransaction(tracer.segment, partialName, response)
          return tracer.bindFunction(route, segment, true).apply(this, arguments)
        }
        wrapped.__NR_original = route
        return on.call(this, method, path, wrapped)
      }
    }
  )
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var shimmer = require('../shimmer.js')
var urltils = require('../util/urltils.js')
var logger = require('../logger.js').child({component: 'express'})
var record = require('../metrics/recorders/generic.js')
var middlewareRecorder = require('../metrics/recorders/express.js')
var NAMES = require('../metrics/names.js')
var express2 = require('./express/express-2.js')
var ensurePartialName = require('./express/common.js').ensurePartialName
var VIEW = NAMES.VIEW // metric-name constants for view rendering
var ORIGINAL = '__NR_original' // key under which wrappers stash the original handler
// Identifiers reserved by ES5 (http://es5.github.io/#x7.6.1.2).
var RESERVED = [
  // always (how would these even get here?)
  'class', 'enum', 'extends', 'super', 'const', 'export', 'import',
  // strict
  'implements', 'let', 'private', 'public', 'yield', 'interface',
  'package', 'protected', 'static'
]
/**
 * ES5 strict mode disallows some identifiers that are allowed in non-strict
 * code. Function names on that keyword list get a trailing underscore so
 * they stay legal in strict mode (which is enabled everywhere inside the
 * agent, as well as at many customer sites).
 *
 * If you really need to crawl your Express app's middleware stack, use
 * name.indexOf('whatever') === 0 as the predicate instead of
 * name === 'whatever' — a little slower, but you shouldn't be doing that
 * anyway.
 *
 * @param {string} name The candidate function name (only the final
 * space-separated word survives, dropping e.g. a 'bound ' prefix).
 *
 * @returns {string} A safe (potentially mangled) function name.
 */
function mangle(name) {
  var tokens = name.split(' ')
  var tail = tokens[tokens.length - 1]
  return RESERVED.indexOf(tail) === -1 ? tail : tail + '_'
}
/**
 * Builds the mounted path for a middleware by joining the paths of the
 * routers currently on the stack, plus the layer's own recorded path,
 * normalizing slashes between the pieces. A trailing slash is trimmed only
 * when the layer contributed the final piece; an empty result yields '/'.
 *
 * @param {Array} routerStack Stack entries with a `path` property.
 * @param {object} layer Express layer whose handle may carry __NR_path.
 * @returns {string} The joined middleware mount path.
 */
function generateMiddlewarePath(routerStack, layer) {
  var fullPath = ''

  // Append one piece, inserting or collapsing the joining slash as needed.
  function join(piece) {
    if (!piece) return
    var endsWithSlash = fullPath[fullPath.length - 1] === '/'
    var trimmed = piece[0] === '/' ? piece.substr(1) : piece
    fullPath += endsWithSlash ? trimmed : '/' + trimmed
  }

  for (var idx = 0; idx < routerStack.length; ++idx) {
    join(routerStack[idx].path)
  }
  if (layer && layer.handle && layer.handle.__NR_path) {
    join(layer.handle.__NR_path)
    if (fullPath[fullPath.length - 1] === '/') {
      fullPath = fullPath.substr(0, fullPath.length - 1)
    }
  }
  return fullPath || '/'
}
// Per-transaction bookkeeping (router stack, last middleware segment),
// keyed by transaction id.
var transactionInfoById = {}

// Drops a transaction's bookkeeping entry once it completes, so the map
// doesn't grow without bound.
function onTransactionFinished(transaction) {
  var id = transaction.id
  delete transactionInfoById[id]
}
module.exports = function initialize(agent, express) {
var tracer = agent.tracer
var registered =
agent.listeners('transactionFinished')
.indexOf(onTransactionFinished) !== -1
if (!registered) {
agent.on('transactionFinished', onTransactionFinished)
}
// Wraps an app/server factory so that invoking it marks the environment as
// an Express app, then defers to the original factory unchanged.
function setDispatcher(app) {
  return function wrappedCreateServer() {
    agent.environment.setDispatcher('express')
    agent.environment.setFramework('express')
    return app.apply(this, arguments)
  }
}
/**
 * This needs to be kept up to date with Express to ensure that it's using
 * the same logic to decide where the callback is hiding.
 *
 * Creates a View metric segment around res.render(); the segment ends when
 * the (possibly synthetic) render callback fires.
 */
function wrapRender(version, render) {
  /* jshint maxparams:5 */ // follow Express as closely as possible
  return function cls_wrapRender(view, options, cb, parent, sub) {
    logger.trace("Rendering Express %d view %s.", version, view)
    if (!tracer.getTransaction()) {
      logger.trace(
        'Express %d view %s rendered outside transaction, not measuring.',
        version,
        view
      )
      return render.apply(this, arguments)
    }
    var name = VIEW.PREFIX + view + VIEW.RENDER
    var segment = tracer.createSegment(name, record)
    var wrapped
    // Express allows render(view, cb): shift the callback out of options.
    if (typeof options === 'function') {
      cb = options
      options = null
    }
    if (cb === null || cb === undefined) {
      /* CAUTION: Need this to generate a metric, but adding a callback
       * changes Express's control flow: with no user callback, Express would
       * normally send the rendered view itself, so the synthetic callback
       * reproduces that (send on success, next(err) on failure).
       */
      wrapped = tracer.bindFunction(function syntheticCallback(err, rendered) {
        if (err) {
          segment.end()
          logger.trace(err,
            "Express %d rendering for metric %s failed for transaction %s:",
            version,
            name,
            segment.transaction.id)
          return this.req.next(err)
        }
        segment.end()
        var returned = this.send(rendered)
        logger.trace("Rendered Express %d view with metric %s for transaction %s.",
          version,
          name,
          segment.transaction.id)
        return returned
      }.bind(this))
    } else {
      // User supplied a callback: just end the segment before invoking it.
      wrapped = tracer.bindFunction(function renderWrapper() {
        segment.end()
        var returned = cb.apply(this, arguments)
        return returned
      }, segment)
    }
    return render.call(this, view, options, wrapped, parent, sub)
  }
}
// wrap express.Router.process_params() in order to get to the Layer class, which
// we need to wrap
function wrapProcessParams(version, process_params) {
  return function cls_wrapProcessParams(layer) {
    var transaction = tracer.getTransaction()
    if (!transaction) {
      logger.trace(
        'Express %d router called outside transaction (wrapProcessParams).',
        version
      )
      return process_params.apply(this, arguments)
    }
    // process_params is the first place (I think) where we have access to a Layer
    // instance. Layer is basically the interface for (req, res, next) - could be
    // a container for actual route handler, or an instance of Router, or an instance
    // of an Express sub-app.
    // Layers are called in the order they are defined. The method
    // Layer.handle_request(req, res, next) is the method called in the chain.
    // If we wrap it, we know exactly when each handler in the chain is called. Based on
    // that we can build the path from nested route handlers.
    if (layer) {
      if (layer.constructor) {
        var layerProto = layer.constructor.prototype
        // Wrap the prototype methods at most once, guarded by isWrapped.
        if (
          layerProto.handle_request &&
          !shimmer.isWrapped(layerProto.handle_request)
        ) {
          shimmer.wrapMethod(layerProto,
            'express.Layer',
            'handle_request',
            wrapLayerHandleRequest)
        }
        if (
          layerProto.handle_error &&
          !shimmer.isWrapped(layerProto.handle_error)
        ) {
          shimmer.wrapMethod(layerProto,
            'express.Layer',
            'handle_error',
            wrapLayerHandleError)
        }
      }
    }
    // Wraps Layer.handle_error: optionally opens a segment for 4-arg error
    // handlers (behind the express_segments feature flag) and ends it when
    // the handler calls next().
    function wrapLayerHandleError(handleError) {
      return function wrappedLayerHandleError(error, req, res, next) {
        var parent = tracer.segment
        var segment = null
        if (!parent) {
          return handleError.apply(this, arguments)
        }
        var transaction = parent.transaction
        if (!transaction.isActive()) {
          return handleError.apply(this, arguments)
        }
        var transactionInfo = getTransactionInfo(transaction)
        if (agent.config.feature_flag.express_segments) {
          var handlerName = (this.handle.name || 'anonymous')
          // Only genuine error handlers (arity 4) get their own segment.
          if (this.handle.length === 4) {
            var prefix = NAMES.EXPRESS.ERROR_HANDLER
            segment = tracer.createSegment(prefix + handlerName, record)
            logger.trace(
              'Creating segment for middleware %s. Transaction id: %s, name: %s',
              segment.name,
              transaction.id,
              transaction.nameState.getName()
            )
            if (segment) {
              segment.start()
            }
          }
          transactionInfo.lastMiddlewareSegment = segment
        }
        // Swap in a recorder so the segment is closed before continuing.
        if (next) {
          arguments[3] = endErrorHandlerRecorder
        }
        this.__NR_parent = parent
        handleError.apply(this, arguments)
        function endErrorHandlerRecorder() {
          segment && segment.end()
          return tracer.bindFunction(next, parent).apply(this, arguments)
        }
      }
    }
    // Wraps Layer.handle_request: classifies the layer (error handler,
    // mounted sub-app, router, route, or plain middleware), maintains the
    // per-transaction router stack used for path reconstruction, and — when
    // the feature flag is on — opens an appropriately named segment.
    function wrapLayerHandleRequest(handleRequest) {
      return function wrappedLayerHandleRequest(req, res, next) {
        var segment = null
        var parent = tracer.getSegment()
        if (!parent) {
          return handleRequest.apply(this, arguments)
        }
        var transaction = parent.transaction
        var transactionInfo = getTransactionInfo(transaction)
        if (!transaction.isActive()) {
          return handleRequest.apply(this, arguments)
        }
        // Classification heuristics based on Express internals; the stack is
        // looked up both through the wrapped original and directly.
        var isErrorHandler = (this.handle.length === 4)
        var isMountedApp = (this.name === 'mounted_app' ||
          (this.handle != null && this.handle.lazyrouter != null))
        var isRouter = (this.handle[ORIGINAL] != null && this.handle[ORIGINAL].stack) ||
          this.handle.stack != null
        var isRoute = (this.route != null)
        var stack = (this.handle &&
          (this.handle[ORIGINAL] && this.handle[ORIGINAL].stack) ||
          this.handle.stack) ||
          (this.route && this.route.stack)
        var routerStack = transactionInfo.routerStack
        if (agent.config.feature_flag.express_segments) {
          var parentSegment
          if (routerStack.length > 0) {
            parentSegment = routerStack[routerStack.length - 1].segment
          }
          if (!isErrorHandler) {
            var segmentName
            var recorder
            if (isMountedApp) {
              segmentName = NAMES.EXPRESS.PREFIX + 'Mounted App: ' +
                this.handle.__NR_path
            } else if (isRouter) {
              segmentName = NAMES.EXPRESS.PREFIX + 'Router: '
              if (this.handle.__NR_path) {
                segmentName += this.handle.__NR_path
              } else {
                segmentName += '/'
              }
            } else if (isRoute) {
              segmentName = NAMES.EXPRESS.PREFIX + 'Route Path: ' +
                this.handle.__NR_path
            } else {
              // Plain middleware: record against its reconstructed mount path.
              var middlewareName = (this.handle.name || 'anonymous')
              segmentName = NAMES.EXPRESS.MIDDLEWARE + middlewareName
              var middlewarePath = generateMiddlewarePath(routerStack, this)
              recorder = middlewareRecorder.bind(null, middlewarePath)
            }
            segment = tracer.createSegment(segmentName, recorder, parentSegment)
            logger.trace(
              'Creating segment for middleware %s. Transaction id: %s, name: %s',
              segment.name,
              transaction.id,
              transaction.nameState.getName()
            )
          }
        }
        // END FEATURE FLAG
        // Track container layers on the router stack so nested paths can be
        // rebuilt; plain middleware just records its segment.
        if (isRouter || isRoute) {
          routerStack.push({
            length: stack.length,
            path: this.handle.__NR_path,
            segment: segment
          })
        } else if (isMountedApp) {
          routerStack.push({
            length: null,
            path: this.handle.__NR_path,
            segment: segment
          })
        } else {
          transactionInfo.lastMiddlewareSegment = segment
        }
        // call cleanup before next middleware function in order to restore
        // transaction.partialName
        if (next) {
          arguments[2] = cleanup
        }
        this.__NR_parent = parent
        return tracer.bindFunction(handleRequest, segment || parent, !!segment)
          .apply(this, arguments)
        // Pops this layer off the router stack (if still responding), touches
        // the parent router's segment, ends this layer's segment, and resumes
        // the chain in the parent's tracer context.
        function cleanup() {
          var parentRouter
          if (!transactionInfo.responded && routerStack.length > 0) {
            parentRouter = routerStack[routerStack.length - 1]
            if (isMountedApp || isRouter || isRoute) {
              routerStack.pop()
            }
          }
          parentRouter && parentRouter.segment && parentRouter.segment.touch()
          segment && segment.end()
          return tracer.bindFunction(next, parent).apply(this, arguments)
        }
      }
    }
    return process_params.apply(this, arguments)
  }
}
/**
* Problem:
*
* 1. Express determines whether middleware functions are error handlers by
* testing their arity. Not cool.
* 2. Downstream Express users rely upon being able to iterate over their
* middleware stack to find specific middleware functions. Sorta less
* uncool, but still a pain.
*
* Solution:
*
* Use eval. This once. For this one specific purpose. Not anywhere else for
* any reason.
*/
// Replaces a layer's handle with a dynamically-built wrapper that preserves
// the original function's NAME (user code looks middleware up by name) and
// ARITY (express classifies error handlers by arity). Returns the wrapper.
function wrapHandle(__NR_handle, path) {
var name = ''
var arglist
// reiterated: testing function arity is stupid
// The wrapper must report the same Function.length as the original.
switch (__NR_handle.length) {
case 2:
arglist = '(req, res)'
break
case 3:
arglist = '(req, res, next)'
break
// don't break other error handlers
case 4:
arglist = '(err, req, res, next)'
break
default:
arglist = '()'
}
if (__NR_handle.name) name = mangle(__NR_handle.name)
// All closure scope variables used in templates must be passed
// in as formal arguments to the wrapped function constructor
/* eslint-disable func-names */
/* eslint-disable no-undef */
var routerTemplate = function() {
return wrappedHandle.call(layer, path, __NR_handle, [].slice.call(arguments))
}
/* eslint-enable no-undef */
/* eslint-enable func-names */
// I am a bad person and this makes me feel bad.
// We use eval because we need to insert the function with a specific
// name to allow for lookup.
/* eslint-disable no-new-func */
// substring(11) strips the "function ()" prefix from the template's source,
// leaving its body to be glued onto the rebuilt name/arg-list.
// NOTE(review): relies on Function.prototype.toString formatting — confirm
// on the engines this agent supports.
var wrapped = new Function(
'tracer', '__NR_handle', 'wrappedHandle', 'path', 'layer',
'return function ' + name + arglist + routerTemplate.toString().substring(11)
)(tracer, __NR_handle, wrappedHandle, path, this)
/* eslint-enable no-new-func */
wrapped[ORIGINAL] = __NR_handle
// pull the attributes from the original handle up to the wrapped one
var handleKeys = Object.keys(__NR_handle)
for (var i = 0; i < handleKeys.length; i++) {
var key = handleKeys[i]
wrapped[key] = __NR_handle[key]
}
wrapped.__NR_path = path
return wrapped
}
// Wraps Router.use / Router.route so handlers just pushed onto the router's
// stack get wrapped via wrapHandle. `route` only distinguishes the two bind
// sites; it is not consulted here.
function wrapMiddlewareStack(route, original) {
return function cls_wrapMiddlewareStack() {
/* We allow `use` to go through the arguments so it can reject bad things
* for us so we don't have to also do argument type checking.
*/
var app = original.apply(this, arguments)
// Mount path defaults to '/'; RegExp paths are kept as their source text.
var path = typeof arguments[0] === 'string' ? arguments[0] : '/'
if (arguments[0] instanceof RegExp) {
path = arguments[0].toString()
}
/* Express adds routes to the same stack as middleware. We need to wrap
* that adder too but we only want to wrap the middleware that are
* added, not the Router.
*/
// wrap most recently added unwrapped handler
// Walk back from the top of the stack, stopping at the first entry that is
// missing a function handle or that we have already wrapped (ORIGINAL set).
var i = this.stack.length
var top
/* eslint-disable no-cond-assign */
while (top = this.stack[--i]) {
if (!top.handle || typeof top.handle !== 'function' || top.handle[ORIGINAL]) {
break
}
top.handle = wrapHandle.call(top, top.handle, path)
}
/* eslint-enable no-cond-assign */
return app
}
}
// The replacement invoked in place of a layer's original handler (spliced in
// by wrapHandle). Maintains transaction naming state, copies route params,
// and wraps next/res.end to observe errors and the end of the response.
function wrappedHandle(path, handle, args) {
var transaction = agent.tracer.getTransaction()
if (!transaction) {
logger.trace(
'Express %d handle for path %s called outside transaction (wrappedHandle).',
version,
path
)
return handle.apply(this, args)
}
var isErrorHandler = handle.length === 4
var transactionInfo = getTransactionInfo(transaction)
var req = args[0]
var isRouter = this.handle[ORIGINAL] != null && this.handle[ORIGINAL].stack ||
this.handle.stack != null
var isRoute = this.route != null
if (req && (isRoute || isRouter)) {
if (req.params) {
// Express 4.3.0 changed where params live. On newer versions of Express
// params should be populated, on older it shouldn't be.
urltils.copyParameters(
transaction.agent.config,
req.params,
transaction.webSegment.parameters
)
}
}
// Only extend the transaction name while no unhandled error is pending, so
// an error keeps the name of the place where it was raised.
if (!transactionInfo.error || transactionInfo.errorHandled) {
ensurePartialName(transaction)
transaction.nameState.appendPath(path)
}
// Update the namestate if we have a callback that we can unwind with.
var last = args.length - 1
var cb = args[last]
if (cb instanceof Function) {
if (isErrorHandler && transactionInfo.error) {
transactionInfo.errorHandled = true
}
args[last] = function wrappedHandleCallback(err) {
// if an error is passed to the next function, we do not want
// to mutate the name path so the transaction will be named
// after the location the error was generated.
if (err && err !== 'route') {
transactionInfo.error = err
} else if (!transactionInfo.responded) {
transaction.nameState.popPath(path)
}
return cb.apply(this, arguments)
}
}
// wrap res.end to mark the transaction as responded
var res = args[1]
if (res && res.end && !res.end[ORIGINAL]) {
var oldEnd = res.end
res.end = function wrappedEnd() {
// end the current middleware segment
if (transactionInfo.lastMiddlewareSegment) {
transactionInfo.lastMiddlewareSegment.end()
}
// end all router segments
var routerStack = transactionInfo.routerStack
if (routerStack.length > 0) {
for (var i = (routerStack.length - 1); i >= 0; i--) {
if (routerStack[i].segment) {
routerStack[i].segment.end()
}
}
}
transactionInfo.responded = true
var err = transactionInfo.error
var errHandled = transactionInfo.errorHandled
var isHttpError = urltils.isError(agent.config, this.statusCode)
// report error if it was not handled by an error handler, or when
// the status code is an HTTP error (more useful to report the actual error
// than a generic HTTP status error)
if (err && (!errHandled || isHttpError)) {
agent.errors.add(transaction, err)
}
logger.trace(
'res.end called, transaction id: %s, name: %s.',
transaction.id,
transaction.nameState.getName()
)
return oldEnd.apply(res, arguments)
}
res.end[ORIGINAL] = oldEnd
}
return handle.apply(this, args)
}
// Wraps Router.param so user-supplied param resolvers are traced as their
// own "Param Handler" segments.
function wrapParamware(param) {
return function wrappedParam(paramName, fn) {
if (fn instanceof Function && typeof paramName === 'string') {
return param.call(this, paramName, paramwareWrapper)
}
// Unexpected signature: let the original validate/reject it.
return param.apply(this, arguments)
/* eslint-disable no-unused-vars */
function paramwareWrapper(req, res, next, value, name) {
/* eslint-enable no-unused-vars */
// Make sure we are correct about which parameter is the callback and
// that we have a transaction which is currently active!
var transaction = tracer.getTransaction()
if (!(next instanceof Function && transaction)) {
return fn.apply(this, arguments)
}
// Create the param handler segment.
transaction.nameState.appendPath('[param handler :' + paramName + ']')
var transactionInfo = getTransactionInfo(transaction)
var middlewarePath = generateMiddlewarePath(transactionInfo.routerStack, this)
var recorder = middlewareRecorder.bind(null, middlewarePath)
var segmentName = NAMES.EXPRESS.PREFIX + 'Param Handler: ' + paramName
var segment = tracer.createSegment(segmentName, recorder)
// Out of an excess of caution, make sure the segment was actually
// created. In theory since we have an active transaction this should
// not fail, but I don't trust anything anymore.
if (!segment) {
return fn.apply(this, arguments)
}
transactionInfo.lastMiddlewareSegment = segment
// Wrap next with some cleanup.
var args = tracer.slice(arguments)
args[2] = tracer.bindFunction(function wrappedNext() {
segment.touch()
transaction.nameState.popPath()
return next.apply(this, arguments)
}, segment)
// Call the paramware.
segment.start()
return tracer.bindFunction(fn, segment).apply(this, args)
}
}
}
// Lazily creates and memoizes the per-transaction instrumentation state.
// Layer.handleRequest is not scoped to a transaction, so this shared state
// (keyed by transaction id) carries what the wrappers need between calls:
//   routerStack           - stack of the routers seen so far plus their
//                           segments, used to rebuild the call tree while
//                           express traverses via next()
//   responded             - whether a response has already gone out; guards
//                           against mis-naming segments for handlers that
//                           respond asynchronously but call next() sync
//   error                 - last error seen, stashed until we know whether
//                           the user handles it themselves
//   errorHandled          - set once an error handler is seen in the tree
//   lastMiddlewareSegment - the segment to end when res.end() fires from
//                           inside a middleware handler
function getTransactionInfo(transaction) {
  var info = transactionInfoById[transaction.id]
  if (!info) {
    info = {
      routerStack: [],
      responded: false,
      error: null,
      errorHandled: false,
      lastMiddlewareSegment: null
    }
    transactionInfoById[transaction.id] = info
  }
  return info
}
/**
* Major versions of express have very different factoring,
* even though the core instrumentation is the same.
*/
var version = express && express.version && express.version[0]
/* TJ decided he didn't want to deal with the hassle of updating a
* version field. Thanks, TJ!
*/
// No version field (express >= 3 dropped it): fingerprint the module shape,
// since each major exposes a distinctive combination of properties.
if (!version && express && express.application &&
express.application.init && express.response &&
express.response.render && express.Router &&
express.Router.prototype.matchRequest) {
version = '3'
} else if (!version && express && express.application &&
express.application.init && express.response &&
express.response.render && express.Router &&
express.Router.process_params && express.application.del) {
version = '4'
} else if (!version && express && express.application &&
!express.application.del) {
version = '5'
}
switch (version) {
case '2':
/* Express 2 doesn't directly expose its Router constructor, so create an
* app and grab the constructor off it. Do it before instrumenting
* createServer so the agent doesn't automatically set the dispatcher
* to Express.
*/
var oneoff = express.createServer()
var Router = oneoff.routes.constructor
shimmer.wrapMethod(express,
'express',
'createServer',
setDispatcher)
/* Express 2 squirts its functionality directly onto http.ServerResponse,
* leaving no clean way to wrap its functionality without pulling in the
* http module ourselves.
*/
var http = require('http')
shimmer.wrapMethod(http.ServerResponse.prototype,
'http.ServerResponse.prototype',
'render',
wrapRender.bind(null, 2))
shimmer.wrapMethod(Router.prototype,
'Router.prototype',
'_match',
express2.wrapMatchRequest.bind(null, tracer, 2))
break
case '3':
shimmer.wrapMethod(express.application,
'express.application',
'init',
setDispatcher)
shimmer.wrapMethod(express.response,
'express.response',
'render',
wrapRender.bind(null, 3))
shimmer.wrapMethod(express.Router.prototype,
'express.Router.prototype',
'matchRequest',
express2.wrapMatchRequest.bind(null, tracer, 3))
shimmer.wrapMethod(express.Router,
'express.Router',
'param',
wrapParamware)
break
case '4':
shimmer.wrapMethod(express.application,
'express.application',
'init',
setDispatcher)
shimmer.wrapMethod(express.response,
'express.response',
'render',
wrapRender.bind(null, 4))
// Express 4 hangs the routing statics directly off the Router function.
shimmer.wrapMethod(express.Router,
'express.Router',
'process_params',
wrapProcessParams.bind(null, 4))
shimmer.wrapMethod(express.Router,
'express.Router',
'use',
wrapMiddlewareStack.bind(null, false))
shimmer.wrapMethod(express.Router,
'express.Router',
'route',
wrapMiddlewareStack.bind(null, true))
shimmer.wrapMethod(express.Router,
'express.Router',
'param',
wrapParamware)
break
case '5':
// FLAG: express5 instrumentation
if (agent.config.feature_flag.express5) {
shimmer.wrapMethod(express.application,
'express.application',
'init',
setDispatcher)
shimmer.wrapMethod(express.response,
'express.response',
'render',
wrapRender.bind(null, 5))
// Express 5 moved the routing methods back onto Router.prototype.
shimmer.wrapMethod(express.Router.prototype,
'express.Router.prototype',
'process_params',
wrapProcessParams.bind(null, 5))
shimmer.wrapMethod(express.Router.prototype,
'express.Router.prototype',
'use',
wrapMiddlewareStack.bind(null, false))
shimmer.wrapMethod(express.Router.prototype,
'express.Router.prototype',
'route',
wrapMiddlewareStack.bind(null, true))
shimmer.wrapMethod(express.Router,
'express.Router',
'param',
wrapParamware)
}
break
default:
logger.warn("Unrecognized version %s of Express detected; not instrumenting",
version)
}
}
|
'use strict'
var shimmer = require('../shimmer')
module.exports = function initialize(agent, generic) {
shimmer.wrapMethod(generic, 'generic-pool', 'Pool', function cb_wrapMethod(Pool) {
return function cls_wrapMethod() {
var pooler = Pool.apply(this, arguments)
shimmer.wrapMethod(pooler, 'Pool', 'acquire', function cb_wrapMethod(acquire) {
return function propagateTransactionThroughPool(callback, priority) {
if (typeof callback === 'function') {
/* See adjustCallback in generic-pool.js for the motivation behind
* this grotesque hack. Tl;dr: depending on Function.length is evil.
*/
var proxied = agent.tracer.bindFunction(callback)
switch (callback.length) {
case 2:
callback = function moveAlongNothingToSeeHere(error, client) {
return proxied.call(this, error, client)
}
break
case 1:
callback = function moveAlongNothingToSeeHere(client) {
return proxied.call(this, client)
}
break
default:
callback = proxied
}
}
return acquire.call(this, callback, priority)
}
})
return pooler
}
})
}
|
'use strict'
var shimmer = require('../shimmer.js')
var urltils = require('../util/urltils.js')
var logger = require('../logger.js').child({component: 'hapi'})
var record = require('../metrics/recorders/generic.js')
var NAMES = require('../metrics/names.js')
var VIEW = NAMES.VIEW
// Names the transaction after the matched Hapi route path and copies the
// route parameters onto the segment. Logs and bails when any piece of the
// required context is missing.
function nameFromRequest(segment, request) {
  if (!segment) {
    return logger.error("No New Relic context to set Hapi route name on.")
  }
  if (!request) {
    return logger.debug("No Hapi request to use for naming.")
  }
  var path = request.route && request.route.path
  if (!path) {
    return logger.debug({request: request}, "No path found on Hapi route.")
  }
  var transaction = segment.transaction
  urltils.copyParameters(transaction.agent.config, request.params, segment.parameters)
  transaction.nameState.setName(
    NAMES.HAPI.PREFIX,
    transaction.verb,
    NAMES.ACTION_DELIMITER,
    path
  )
}
// Records hapi as both the dispatcher and the web framework in the agent's
// environment facts.
function setDispatcher(agent) {
  var environment = agent.environment
  environment.setDispatcher('hapi')
  environment.setFramework('hapi')
}
module.exports = function initialize(agent, hapi) {
if (!agent) return logger.error("Hapi instrumentation bootstrapped without agent")
if (!hapi) return logger.error("Hapi instrumentation applied without module")
var tracer = agent.tracer
// Wraps Views.prototype.render so the active segment is ended when the render
// callback fires, capturing the duration of the view render.
function wrapRender(render) {
return function wrappedRender(filename, context, options, callback) {
var wrapped = callback
// FIXME: this is going to be the most recent segment, which may not be right
var segment = tracer.getSegment()
if (segment && callback) {
wrapped = tracer.bindFunction(function cb_bindFunction() {
segment.end()
return callback.apply(this, arguments)
})
}
return render.call(this, filename, context, options, wrapped)
}
}
// Wraps Server.prototype.start: registers hapi as the dispatcher and, if a
// view manager has already been configured, instruments its render method.
function wrapStart(start) {
return function wrappedStart() {
setDispatcher(agent)
/* The patched module loader doesn't access the filesystem itself, so
* lazily apply the patch to Views.prototype.render only once a Views
* object has been assigned as the view manager.
*/
if (this._views) {
logger.debug('Hapi view manager set; instrumenting render.')
var proto = this._views.constructor.prototype
shimmer.wrapMethod(proto, 'hapi.Views.prototype', 'render', wrapRender)
}
return start.apply(this, arguments)
}
}
// Wraps Server.prototype.views so the render method on the lazily-created
// view manager gets instrumented as soon as the manager exists.
function wrapViews(views) {
  return function wrappedViews() {
    var result = views.apply(this, arguments)
    /* The patched module loader doesn't access the filesystem itself, so
     * the patch to Views.prototype.render can only be applied once a Views
     * object has actually been assigned as the view manager.
     */
    if (!this._views) {
      logger.warn('Hapi view manager set without manager actually being created.')
      return result
    }
    var viewsProto = this._views.constructor.prototype
    shimmer.wrapMethod(viewsProto, 'hapi.Views.prototype', 'render', wrapRender)
    return result
  }
}
// Instruments reply.view so each view reply gets its own render segment.
function wrapReplyView(reply) {
reply.view = tracer.wrapFunction(VIEW.PREFIX, record, reply.view, wrapper)
// Renames the freshly created segment after the requested template (args[0]).
function wrapper(segment, args) {
segment.name = VIEW.PREFIX + args[0] + VIEW.RENDER
return args
}
}
// Wraps an individual route handler: names the transaction after the matched
// route and instruments reply.view when the reply interface exposes it.
function wrapHandler(handler) {
return function cls_wrapHandler(request, reply) {
if (!tracer.getTransaction()) {
logger.trace("Hapi route handler called outside transaction.")
return handler.apply(this, arguments)
}
nameFromRequest(tracer.getSegment(), request)
if (reply && reply.view) wrapReplyView(reply)
return handler.apply(this, arguments)
}
}
/**
* Compare the before and after state of the router and apply the route wrapper
* to the new routes.
*
* @param {object} before - State of the router before the new routes were added.
* @param {object} after - State of the router after the new routes were added.
* @param {string} vhost - If the user is letting hapi route its vhosts, use
* it in logging for debugging.
* @param {function} visit - Function used to wrap up the new routes.
*/
function tableVisitor(before, after, vhost, visit) {
if (!vhost) vhost = '*'
if (after) {
Object.keys(after).forEach(function cb_forEach(method) {
var beforeHandlers = before && before[method]
var afterHandlers = after[method]
// hapi 8 nested routes a little deeper.
if (afterHandlers.routes) {
afterHandlers = afterHandlers.routes
}
for (var i = 0; i < afterHandlers.length; i++) {
var route = afterHandlers[i]
logger.debug('Instrumented hapi route [host %s] %s %s',
vhost, method, route.path)
// Only wrap routes that were not present before this registration —
// anything already in `before` was handled by an earlier call.
if (!beforeHandlers || beforeHandlers.indexOf(route) === -1) {
// hapi@6.9.0 started nesting the route handler 1 layer deeper
if (route.route) {
route = route.route
}
if (route.settings && route.settings.handler) {
route.settings.handler = visit(route.settings.handler)
} else {
logger.warn(
'Could not find handler to instrument for hapi route [host %s] %s %s',
vhost,
method,
route.path
)
}
}
}
})
}
}
/**
* This is pretty slow but only happens at route add time so optimizing it
* is of limited benefit. It is also moderately complex so lets go through
* what it does:
*
* 1. Gather the state of the router into `before*` variables.
* 2. Apply the new route(s) that are being added (which could be an array of
* routes, and cover a number of different methods).
* 3. Get the new state of the router.
* 4. Pass it all to the table vistor which applies the route wrapper to all
* the of individual routes that were just added.
*/
function wrapRoute(_route) {
return function wrappedRoute(configs, env) {
var server = this
var router = server._router
if (!router) return logger.warn("no router found on hapi server")
var vhosts = router.vhosts
var beforeHosts = {}
// Step 1a: snapshot the vhost routing tables before the new routes land.
if (vhosts) {
logger.debug("capturing vhosts on hapi router")
Object.keys(vhosts).forEach(function cb_forEach(host) {
beforeHosts[host] = {}
Object.keys(vhosts[host]).forEach(function cb_forEach(method) {
var routes = vhosts[host][method]
// hapi 8 nested routes a little deeper.
if (routes && routes.routes) {
routes = routes.routes
}
beforeHosts[host][method] = routes.slice()
})
})
}
// The non-vhost table lives under a version-dependent property name.
var symbol
if (typeof router.table === 'function') {
// hapi 2: router.table -> router.routes & router.table is a function
symbol = 'routes'
} else {
// hapi 1: when vhosts aren't used, router.table contains the routes
symbol = 'table'
}
// Step 1b: snapshot the main routing table.
var table = router[symbol]
var beforeTable = {}
if (table) {
Object.keys(table).forEach(function cb_forEach(method) {
// hapi 8 nested routes a little deeper.
var routes = table[method]
if (routes.routes) {
routes = routes.routes
}
beforeTable[method] = routes.slice()
})
}
// Step 2: let hapi actually register the new route(s).
var returned = _route.call(this, configs, env)
// Steps 3-4: diff against the snapshots and wrap only the new handlers.
vhosts = router.vhosts
if (vhosts) {
Object.keys(vhosts).forEach(function cb_forEach(host) {
tableVisitor(beforeHosts[host], vhosts[host], host, wrapHandler)
})
}
// Object could have been switched out, make sure to get a fresh one.
table = router[symbol]
if (table) tableVisitor(beforeTable, table, undefined, wrapHandler)
return returned
}
}
// Wraps hapi.createServer (hapi 7.2 - 7.5.2, where Server.prototype is
// empty): the first created server exposes the real prototype, which gets
// patched, after which this shim removes itself.
function wrapCreateServer(createServer) {
  return function createServerWrapper() {
    var server = createServer.apply(this, arguments)
    var serverProto = server.constructor.prototype
    shimServerPrototype(serverProto, 'hapi.Server.constructor.prototype')
    // The deeper prototype is instrumented now, so the createServer shim has
    // served its purpose; unwrap it.
    shimmer.unwrapMethod(hapi, 'hapi', 'createServer')
    return server
  }
}
// Instruments the three Server methods the agent cares about: lifecycle
// start, view-manager registration, and route registration.
function shimServerPrototype(proto, name) {
  var wrappers = {start: wrapStart, views: wrapViews, _route: wrapRoute}
  Object.keys(wrappers).forEach(function cb_forEach(method) {
    shimmer.wrapMethod(proto, name, method, wrappers[method])
  })
}
// Wraps Server.prototype.connection (hapi 8+). The first connection exposes
// the Connection and replier prototypes; patch those once, then remove this
// shim.
function wrapConnection(connection) {
return function wrappedConnection() {
setDispatcher(agent)
// Server.prototype returns a connection object
var plugin = connection.apply(this, arguments)
// Defensive against the possiblity that there isn't a connection for some
// reason.
if (plugin && plugin.connections && plugin.connections.length > 0) {
shimmer.wrapMethod(
plugin.connections[0].constructor.prototype,
'hapi.Connection.constructor.prototype',
'_route',
wrapRoute
)
shimmer.wrapMethod(
plugin.connections[0].server._replier.constructor.prototype,
'hapi.Connection.server._replier.constructor.prototype',
'interface',
wrapInterface
)
// Unwrap connection now that we've managed to patch the prototype
shimmer.unwrapMethod(
hapi.Server.prototype,
'hapi.Server.prototype',
'connection'
)
}
return plugin
}
}
// Wraps the replier's interface() factory so every reply object it produces
// has its response() method instrumented.
function wrapInterface(replier) {
  return function wrappedInterface() {
    var reply = replier.apply(this, arguments)
    shimmer.wrapMethod(reply, 'hapi.Reply', 'response', wrapResponse)
    return reply
  }
}
// Wraps reply.response so the active segment's timestamp is refreshed each
// time a response object is created.
function wrapResponse(response) {
  return function wrappedResponse() {
    var activeSegment = agent.tracer.getSegment()
    if (activeSegment) {
      activeSegment.touch()
    }
    return response.apply(this, arguments)
  }
}
// Detect the hapi major version from the shape of Server.prototype and apply
// the matching instrumentation strategy.
var proto = hapi && hapi.Server && hapi.Server.prototype
if (proto && proto.start && proto.views && proto._route) { // Hapi 1 - 7.1.1
shimServerPrototype(proto, 'hapi.Server.prototype')
} else if (proto && Object.keys(proto).length === 0) { // Hapi 7.2 - 7.5.2
// This gets removed on first invocation as it is just used to patch a
// deeper prototype.
shimmer.wrapMethod(hapi, 'hapi', 'createServer', wrapCreateServer)
} else if (proto && proto.start && proto.route && proto.connection) { // Hapi 8+
shimmer.wrapMethod(proto, 'hapi.Server.prototype', 'connection', wrapConnection)
} else { // Some unknown future or hacked up version
logger.warn('hapi Server constructor not found; can\'t instrument')
}
}
|
'use strict'
var stringifySync = require('../util/safe-json').stringifySync
var shimmer = require('../shimmer')
var urltils = require('../util/urltils.js')
var recordRedis = require('../metrics/recorders/redis.js')
var REDIS = require('../metrics/names').REDIS
// Instruments a promise-based redis client: wraps sendCommand so each command
// gets a Datastore segment named after the redis operation.
module.exports = function initialize(agent, redis) {
var tracer = agent.tracer
shimmer.wrapMethod(
redis && redis.prototype,
'redis.prototype',
'sendCommand',
function wrapSendCommand(original) {
return tracer.wrapFunction(
REDIS.OPERATION + 'Unknown',
recordRedis,
original,
wrapper
)
}
)
// Receives the freshly created segment and the sendCommand arguments;
// args[0] is the command object. NOTE(review): assumes `this` is the client
// instance exposing connector.options and that every command carries a
// `promise` — confirm against the driver version being wrapped.
function wrapper(segment, args) {
var command = args[0]
var keys = command.args
segment.name = REDIS.OPERATION + (command.name || 'unknown')
if (keys && typeof keys !== 'function') {
// Only the first key is captured (safely stringified) as a parameter.
urltils.copyParameters(agent.config,
{key: stringifySync(keys[0], 'Unknown')}, segment.parameters)
}
// capture connection info for datastore instance metric
segment.port = this.connector.options.port
segment.host = this.connector.options.host
// record duration when promise resolves
command.promise.finally(function cb_resolved() {
segment.touch()
})
return args
}
}
|
'use strict'
var stringifySync = require('../util/safe-json').stringifySync
var shimmer = require('../shimmer.js')
var urltils = require('../util/urltils.js')
var recordMemcache = require('../metrics/recorders/memcached.js')
var MEMCACHE = require('../metrics/names.js').MEMCACHE
// Derives the list of cache keys touched by a memcached call description.
// Single-key calls expose `key` directly; multi-key calls pack the keys into
// the command string after the command name; anything else touches no keys.
function wrapKeys(metacall) {
  if (metacall.key) {
    return [metacall.key]
  }
  if (metacall.multi) {
    return metacall.command.split(' ').slice(1)
  }
  return []
}
/**
* Thanks to Hernan Silberman!
*
* instrument the memcached driver to intercept calls and keep stats on them.
*/
// Instruments the memcached driver: wraps Memcached.prototype.command so each
// cache call gets a Datastore segment with operation, key, and instance
// attributes, and its callback is bound to the segment.
module.exports = function initialize(agent, memcached) {
var tracer = agent.tracer
shimmer.wrapMethod(
memcached && memcached.prototype,
'memcached.prototype',
'command',
function commandWrapper(original) {
return tracer.wrapFunction(
MEMCACHE.OPERATION + 'Unknown',
recordMemcache,
original,
wrapCommand
)
}
)
// Receives the new segment, the intercepted arguments, and a `bind` helper
// for tying callbacks back to the segment.
function wrapCommand(segment, args, bind) {
// The `command` method takes two arguments: a query generator and a server
// address. The query generator returns a simple object describing the
// memcached call. The server parameter is only provided for multi-calls.
// When not provided, it can be derived from the key being interacted with.
var metacall = args[0]()
var server = args[1]
var keys = wrapKeys(metacall)
segment.name = MEMCACHE.OPERATION + (metacall.type || 'Unknown')
// Capture connection info for datastore instance metric.
var location = null
if (typeof server === 'string') {
location = server.split(':')
} else if (this.HashRing && this.HashRing.get && metacall.key) {
// Single-key call: resolve the server from the client's hash ring.
location = this.HashRing.get(metacall.key).split(':')
}
if (location) {
segment.captureDBInstanceAttributes(location[0], location[1], false)
}
// Only the first key is captured (safely stringified) as a parameter.
urltils.copyParameters(
agent.config,
{
key: stringifySync(keys[0], 'Unknown')
},
segment.parameters
)
/* Memcache call description includes a callback to apply when the
* operation is concluded. Wrap that to trace the duration of the
* operation.
*/
shimmer.wrapMethod(
metacall,
'metacall',
'callback',
function wrapMetacallCallback(callback) {
return bind(callback, true, true)
}
)
// rewrap the metacall for the command object
args[0] = function rewrapped() {
return metacall
}
// finally, execute the original command
return args
}
}
|
'use strict'
var ParsedStatement = require('../db/parsed-statement')
var shimmer = require('../shimmer')
var logger = require('../logger').child({component: 'mongodb'})
var MONGODB = require('../metrics/names').MONGODB
// Matches segment names created by this instrumentation; used to decide
// whether instance attributes may safely be added to the current segment.
var MONGO_SEGMENT_RE = /^Datastore\/(?:statement|operation)\/MongoDB\//
// legacy endpoint enumerations
// Db-level operations wrapped on older (pre-apm-api) driver versions.
var DB_OPS = [
'addUser',
'authenticate',
'collection',
'collectionNames',
'collections',
'command',
'createCollection',
'createIndex',
'cursorInfo',
'dereference',
'dropCollection',
'dropDatabase',
'dropIndex',
'ensureIndex',
'eval',
'executeDbAdminCommand',
'indexInformation',
'logout',
'open',
'reIndex',
'removeUser',
'renameCollection',
'stats',
'_executeInsertCommand',
'_executeQueryCommand'
]
// Collection-level operations wrapped on older driver versions.
var COLLECTION_OPS = [
'aggregate',
'bulkWrite',
'count',
'createIndex',
'deleteMany',
'deleteOne',
'distinct',
'drop',
'dropAllIndexes',
'dropIndex',
'ensureIndex',
'findAndModify',
'findAndRemove',
'findOne',
'findOneAndDelete',
'findOneAndReplace',
'findOneAndUpdate',
'geoHaystackSearch',
'geoNear',
'group',
'indexes',
'indexExists',
'indexInformation',
'insert',
'insertMany',
'insertOne',
'isCapped',
'mapReduce',
'options',
'parallelCollectionScan',
'reIndex',
'remove',
'rename',
'replaceOne',
'save',
'stats',
'update',
'updateMany',
'updateOne'
]
// GridStore (GridFS) operations.
var GRID_OPS = [
'put',
'get',
'delete'
]
// Cursor operations.
var CURSOR_OPS = [
'nextObject',
'next',
'toArray',
'count',
'explain'
]
module.exports = initialize

/**
 * Instruments the mongodb driver. Prefers the driver's own APM
 * instrumentation API (`mongodb.instrument`) when present; otherwise falls
 * back to wrapping a fixed list of methods on each prototype.
 *
 * @param {Agent} agent - The New Relic agent; supplies the tracer.
 * @param {object} mongodb - The loaded mongodb module; may be undefined.
 */
function initialize(agent, mongodb) {
  if (!mongodb) return
  var tracer = agent.tracer
  // Maps class names reported by the APM api to the wrapper factory
  // appropriate for that class's methods.
  var moduleNameToWrapFunction = {
    'GridStore': wrapGrid,
    'OrderedBulkOperation': wrapQuery,
    'UnorderedBulkOperation': wrapQuery,
    'CommandCursor': wrapQuery,
    'AggregationCursor': wrapQuery,
    'Cursor': wrapQuery,
    'Collection': wrapQuery,
    'Db': wrapDb
  }
  // Callback handed to mongodb.instrument(): receives the list of
  // instrumentable modules discovered by the driver.
  function instrumentModules(err, instrumentations) {
    if (err) {
      logger.trace('Unable to instrument mongo using the apm api due to error: %s', err)
      // fallback to legacy instrumentation?
      return
    }
    instrumentations.forEach(instrumentModule)
  }
  // Wraps each method described by one instrumentation entry on the class's
  // prototype, choosing the wrapper by class name (or `wrapEach` for `each`).
  function applyInstrumentation(objectName, object, instrumentation) {
    var methods = instrumentation.methods
    var methodOptions = instrumentation.options
    // only callback-style methods are traced here
    if (methodOptions.callback) {
      for (var j = 0; j < methods.length; j++) {
        var method = methods[j]
        var wrapFunction
        // `each` fires its callback once per document, so it needs its own
        // wrapper regardless of the owning class
        if (method === 'each') {
          wrapFunction = wrapEach
        } else {
          wrapFunction = moduleNameToWrapFunction[objectName]
        }
        if (wrapFunction) {
          shimmer.wrapMethod(
            object.prototype,
            'mongodb.' + objectName + '.' + method,
            method,
            wrapFunction
          )
        } else {
          logger.trace('No wrapping method found for %s', objectName)
        }
      }
    }
  }
  // Applies every instrumentation entry reported for a single module/class.
  function instrumentModule(module) {
    var object = module.obj
    var instrumentations = module.instrumentations
    for (var i = 0; i < instrumentations.length; i++) {
      applyInstrumentation(module.name, object, instrumentations[i])
    }
  }
  // instrument using the apm api
  if (mongodb.instrument) {
    var instrumenter = mongodb.instrument({}, instrumentModules)
    instrumenter.on('started', function onMongoEventStarted(evnt) {
      // This assumes that this `started` event is fired _after_ our wrapper
      // starts and creates the segment. We perform a check of the segment name
      // out of an excess of caution.
      var segment = tracer.getSegment()
      var connId = evnt.connectionId
      if (connId && segment && MONGO_SEGMENT_RE.test(segment.name)) {
        logger.trace('Adding db instance attributes to segment %j', segment.name)
        // Mongo sticks the path to the domain socket in the "host" slot, but we
        // want it in the "port", so if we have a domain socket we need to change
        // the order of our parameters.
        if (connId.domainSocket) {
          segment.captureDBInstanceAttributes('localhost', connId.host, evnt.databaseName)
        } else {
          segment.captureDBInstanceAttributes(connId.host, connId.port, evnt.databaseName)
        }
      } else {
        logger.trace(
          'Not adding db instance metric attributes to segment %j',
          segment && segment.name
        )
      }
    })
    // the apm api handles everything; skip the legacy wrapping below
    return
  }
  // fallback to legacy enumerations
  if (mongodb.Cursor && mongodb.Cursor.prototype) {
    // should wrapup stream aswell
    shimmer.wrapMethod(
      mongodb.Cursor.prototype,
      'mongodb.Cursor.prototype',
      CURSOR_OPS,
      wrapQuery
    )
    shimmer.wrapMethod(
      mongodb.Cursor.prototype,
      'mongodb.Cursor.prototype',
      'each',
      wrapEach
    )
  }
  if (mongodb.Collection && mongodb.Collection.prototype) {
    shimmer.wrapMethod(
      mongodb.Collection.prototype,
      // NOTE(review): label says Cursor but this wraps Collection -- looks
      // like a copy/paste slip in the shimmer label; verify before changing
      'mongodb.Cursor.prototype',
      COLLECTION_OPS,
      wrapQuery
    )
  }
  if (mongodb.Grid && mongodb.Grid.prototype) {
    shimmer.wrapMethod(
      mongodb.Grid.prototype,
      'mongodb.Grid.prototype',
      GRID_OPS,
      wrapGrid
    )
  }
  if (mongodb.Db && mongodb.Db.prototype) {
    for (var i = 0, l = DB_OPS.length; i < l; ++i) {
      shimmer.wrapMethod(
        mongodb.Db.prototype,
        'mongodb.Db.prototype',
        DB_OPS[i],
        wrapDb
      )
    }
    // Db.connect is a static method, wrapped on the constructor itself
    shimmer.wrapMethod(mongodb.Db, 'mongodb.Db', 'connect', wrapDb)
  }
  // Shared precondition logic for every wrapper below: extracts the
  // collection name, then only defers to `wrapper` when there is a trailing
  // callback, an active transaction, and we are not already inside a mongo
  // segment; otherwise the original method runs untraced.
  function wrapOp(original, name, wrapper) {
    return function wrapped() {
      var args = tracer.slice(arguments)
      var last = args.length - 1
      var callback = typeof args[last] === 'function' ? args[last] : null
      var transaction = tracer.getTransaction()
      // the collection name lives in different places depending on the class
      // and driver version
      var collection = this.collectionName || 'unknown'
      if (this.collection && this.collection.collectionName) {
        collection = this.collection.collectionName
      } else if (this.s && this.s.name) {
        collection = this.s.name || collection
      } else if (this.ns) {
        // the namespace has the form "db.collection"
        collection = this.ns.split(/\./)[1] || collection
      }
      if (!callback) {
        logger.trace(
          'Not tracing MongoDB %s.%s(); no callback.',
          collection,
          name
        )
        return original.apply(this, args)
      } else if (!transaction) {
        logger.trace(
          'Not tracing MongoDB %s.%s(); no New Relic transaction.',
          collection,
          name
        )
        return original.apply(this, args)
      } else if (inMongoSegment(tracer)) {
        logger.trace(
          'Not tracing MongoDB %s.%s(); Already in a mongo segment',
          collection,
          name
        )
        return original.apply(this, args)
      }
      return wrapper.call(this, args, last, collection)
    }
  }
  // Wraps a single-callback query method in a statement segment.
  function wrapQuery(original, opName) {
    return wrapOp(original, opName, function wrappedQuery(args, last, collection) {
      var segment = addMongoStatement(tracer, collection, opName)
      var callback = args[last]
      logger.trace(
        'Tracing MongoDB %s.%s().',
        collection,
        opName
      )
      // capture configuration information if available
      captureInstanceAttributes(segment, this)
      args[last] = tracer.wrapCallback(callback, segment, function wrappedCallback() {
        segment.touch()
        logger.trace('Tracing MongoDB %s.%s() ended.', collection, opName)
        return callback.apply(this, arguments)
      })
      return tracer.bindFunction(original, segment).apply(this, args)
    })
  }
  // Wraps Cursor#each, whose callback fires once per document: the segment is
  // touched on each invocation and the wrapped callback is re-created when a
  // batch drains.
  function wrapEach(original, opName) {
    return wrapOp(original, opName, function wrappedEach(args, last, collectionName) {
      var segment = addMongoStatement(tracer, collectionName, opName)
      var callbackBatch = null
      var callback = args[last]
      var collection = this
      logger.trace('Tracing MongoDB %s.%s().', collection, opName)
      // capture configuration information if available
      captureInstanceAttributes(segment, this)
      args[args.length - 1] = wrappedCallback
      return tracer.bindFunction(original, segment).apply(this, args)
      function wrappedCallback(err, item) {
        segment.touch()
        // a null item signals cursor exhaustion
        if (err || item === null) {
          // NOTE(review): three placeholders but only two arguments are
          // supplied -- verify the intended log output
          logger.trace('Tracing MongoDB %s.%s(%s) ended.', collection, opName)
        }
        if (!callbackBatch) {
          callbackBatch = tracer.wrapCallback(
            callback,
            segment,
            function wrapBatch() {
              // once the current batch is drained, force a re-wrap on the
              // next document
              if (!collection.items || !collection.items.length) {
                callbackBatch = null
              }
              return callback.apply(this, arguments)
            }
          )
        }
        return callbackBatch.apply(this, arguments)
      }
    })
  }
  // Wraps GridFS operations in an operation segment (no SQL-style statement
  // parsing applies).
  function wrapGrid(original, opName) {
    return wrapOp(original, opName, function wrappedGridOp(args, last) {
      var name = MONGODB.OPERATION + 'GridFS-' + opName
      var callback = args[last]
      var grid = this
      // TODO: should look into adding a recorder for this
      return tracer.addSegment(name, null, null, false, segmentWrapper)
      function segmentWrapper(segment) {
        args[last] = tracer.wrapCallback(callback, segment, nrCallbackWrap)
        return original.apply(grid, args)
        function nrCallbackWrap() {
          segment.touch()
          logger.trace('Tracing MongoDB Grid.%s() ended.', opName)
          return callback.apply(this, arguments)
        }
      }
    })
  }
  // Wraps Db-level operations in an operation segment.
  function wrapDb(original, opName) {
    return wrapOp(original, opName, function wrappedGridOp(args, last) {
      var name = MONGODB.OPERATION + opName
      var callback = args[last]
      var db = this
      // TODO: should look into adding a recorder for this
      return tracer.addSegment(name, null, null, false, segmentWrapper)
      function segmentWrapper(segment) {
        args[last] = tracer.wrapCallback(callback, segment, nrCallbackWrap)
        return tracer.bindFunction(original, segment).apply(db, args)
        function nrCallbackWrap() {
          segment.touch()
          logger.trace('Tracing MongoDB %s() ended.', opName)
          return callback.apply(this, arguments)
        }
      }
    })
  }
}
// Matches segment names produced by this instrumentation (statement or
// operation form). Both prefixes must live inside a single anchored group:
// in `^A|B` the `^` binds only to the first alternative, so the previous
// form `'^(?:A)|(?:B)'` matched the operation prefix anywhere in the name.
// (Compare the correctly grouped MONGO_SEGMENT_RE above.)
var MONGO_RE = new RegExp(
  '^(?:' + MONGODB.STATEMENT + '|' + MONGODB.OPERATION + ')'
)
/**
 * Checks whether the tracer's current segment was created by this mongo
 * instrumentation. Used to avoid creating nested mongo segments.
 *
 * @param {Tracer} tracer - The agent's tracer.
 * @return {bool} True if the active segment is a mongo segment.
 */
function inMongoSegment(tracer) {
  // getSegment() may yield null when no segment is active; the previous
  // unconditional `.name` access would throw in that case.
  var segment = tracer.getSegment()
  return !!segment && MONGO_RE.test(segment.name)
}
/**
 * Creates and starts a datastore segment for a mongo statement, wiring in a
 * metrics recorder derived from the parsed statement.
 *
 * @param {Tracer} tracer - The agent's tracer.
 * @param {string} collection - Name of the collection being operated on.
 * @param {string} opName - Name of the mongo operation.
 * @return {TraceSegment} The started segment.
 */
function addMongoStatement(tracer, collection, opName) {
  var metricName = MONGODB.STATEMENT + collection + '/' + opName
  var parsed = new ParsedStatement(MONGODB.PREFIX, opName, collection)
  var segment = tracer.createSegment(metricName, parsed.recordMetrics.bind(parsed))
  segment.start()
  return segment
}
/**
 * Copies datastore instance information (host, port, database name) from a
 * mongo object's server configuration onto the given segment, when present.
 *
 * @param {TraceSegment} segment - The segment to receive the attributes.
 * @param {object} obj - A mongo object that may expose `db.serverConfig`.
 */
function captureInstanceAttributes(segment, obj) {
  var serverConfig = obj.db && obj.db.serverConfig
  if (!serverConfig) {
    logger.trace('Could not find datastore instance attributes.')
    return
  }
  logger.trace('Adding datastore instance attributes from obj.db.serverConfig')
  // the database name hangs off either `db` or `dbInstance` depending on
  // driver version
  var dbHolder = serverConfig.db || serverConfig.dbInstance || {}
  var host = serverConfig.host
  var port = serverConfig.port
  // If using a domain socket, mongo stores the path as the host name, but we
  // pass it through the port value.
  if (serverConfig.socketOptions && serverConfig.socketOptions.domainSocket) {
    port = host
    host = 'localhost'
  }
  segment.captureDBInstanceAttributes(host, port, dbHolder.databaseName)
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 | 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../logger').child({component: 'mysql'})
var shimmer = require('../shimmer')
var parseSql = require('../db/parse-sql')
var MYSQL = require('../metrics/names').MYSQL
var dbutil = require('../db/util')
/**
 * Instruments the mysql driver. Handles node-mysql 2.x (createConnection /
 * createPool / createPoolCluster) and the legacy 0.9 Client API.
 *
 * @param {Agent} agent - The New Relic agent; supplies the tracer and config.
 * @param {object} mysql - The loaded mysql module.
 */
module.exports = function initialize(agent, mysql) {
  var tracer = agent.tracer
  // Wraps the `.query` method of anything queriable (connection or pool).
  // `poolQuery` marks pool-level queries, which get a fixed segment name
  // instead of one parsed from the SQL.
  function wrapQueriable(queriable, name, poolQuery) {
    // may not always be a queriable object, but anything with a .query
    // you should pass the appropriate name in for shimmer
    if (!queriable) {
      return
    }
    shimmer.wrapMethod(
      queriable,
      name,
      'query',
      function nrQueryWrapper(original) {
        return tracer.wrapFunction(
          poolQuery ? 'MySQL pool.query' : MYSQL.STATEMENT + 'Unknown',
          null,
          original,
          cb_wrapMethod, // wrap arguments
          bindStreamingEvents // wrap return value
        )
      }
    )
    // we bind the streaming event emitters to track the query's
    // progress update the query's segment.
    function bindStreamingEvents(segment, queryObject) {
      if (queryObject && queryObject.emit) {
        queryObject.emit = tracer.bindFunction(queryObject.emit, segment, true)
      }
      return queryObject
    }
    // remembered across queries on this queriable so a `USE <db>` statement
    // affects the instance attributes of subsequent queries
    var databaseName = null
    // Rewrites .query's argument list: binds the callback, captures the SQL,
    // names the segment, and records instance attributes.
    function cb_wrapMethod(segment, args, bind) {
      var sqlString = ''
      // This is just a massive argument hunt
      // because you can call .query in many ways.
      //
      // You should populate `userCallback` after this block with a callback.
      // Optionally you may populate `queryVals` and `sqlString`.
      // The value in `sqlString` will show up in the UI
      var vargs = []
      if (args.length === 1 && typeof args[0] === 'object') {
        // .query(query)
        // query query is a Query object and contains ._callback and .sql
        args[0]._callback = bind(args[0]._callback)
        sqlString = args[0].sql
        vargs.push(args[0])
      } else if (args.length === 1) {
        // either .query(callback) or .query(sql)
        // in the latter case we rely on the streaming interface
        if (typeof args[0] !== 'function') {
          // NOTE(review): args[0] is not an object on this path, so `.sql`
          // looks like it would be undefined -- verify whether this was meant
          // to be `sqlString = args[0]`
          sqlString = args[0].sql
          vargs.push(args[0])
        } else {
          vargs.push(bind(args[0]))
        }
      } else if (args.length === 2) {
        // .query(sql, callback) or .query(sql, values)
        // in the latter case we rely on the streaming interface
        vargs.push(sqlString = args[0])
        if (typeof args[1] !== 'function') {
          vargs.push(args[1])
        } else {
          vargs.push(bind(args[1]))
        }
      } else {
        // .query(sql, values, callback) or unknown
        // in the latter case, we just omit measuring
        vargs.push(sqlString = args[0])
        vargs.push(args[1])
        if (typeof args[2] !== 'function') {
          // no user callback: pass the extra argument through and append a
          // touch-only callback so the segment still gets closed
          vargs.push(args[2])
          vargs.push(segment.touch.bind(segment))
        } else {
          vargs.push(bind(args[2]))
        }
      }
      // name the metric
      if (!poolQuery) {
        var ps = parseSql(MYSQL.PREFIX, sqlString)
        var model = ps.model
        var operation = ps.operation
        var segmentName = MYSQL.STATEMENT + (model || 'unknown') + '/' + operation
        logger.trace(
          'capturing mysql query in %s. model: %s, Operation: %s',
          name,
          model,
          operation
        )
        // we will end the segment in onEnd above
        tracer.getTransaction().addRecorder(ps.recordMetrics.bind(ps, segment))
        segment.name = segmentName
        // pooled connections keep their settings on `connectionConfig`;
        // plain connections keep them directly on `config`
        if (queriable.config && queriable.config.connectionConfig) {
          var queryConfig = queriable.config.connectionConfig
        } else if (queriable.config) {
          var queryConfig = queriable.config
        }
        if (queryConfig) {
          databaseName = databaseName || queryConfig.database
          if (queryConfig.socketPath) {
            // in the unix domain socket case we force the host to be
            // localhost
            segment.captureDBInstanceAttributes(
              'localhost',
              queryConfig.socketPath,
              databaseName
            )
          } else {
            segment.captureDBInstanceAttributes(
              queryConfig.host,
              queryConfig.port,
              databaseName
            )
          }
        } else {
          logger.trace('No query config detected, not collecting db instance data')
        }
        // a `USE <db>` statement changes the database for later queries
        databaseName = dbutil.extractDatabaseChangeFromUse(sqlString) || databaseName
      }
      return vargs
    }
  }
  // Normalizes getConnection-style argument lists into leading args plus a
  // trailing callback.
  function getVargs(args) {
    var callback
    var vargs = []
    switch (args.length) {
      case 1:
        callback = args[0]
        break
      case 2:
        vargs.push(args[0])
        callback = args[1]
        break
      default:
        vargs.push(args[0])
        vargs.push(args[1])
        callback = args[2]
        break
    }
    logger.trace({args: args, vargs: vargs}, 'parsed getConnection arguments')
    return {
      vargs: vargs,
      callback: callback
    }
  }
  // Returns a replacement for getConnection that wraps the delivered
  // connection's .query method before handing it to the caller's callback.
  function getConnectionHandler(dbObject, getConnectionMethod) {
    return function wrap_getConnection() { // getConnection
      var args = getVargs(arguments)
      var getConnectionCallback
      // let's verify that we actually have a callback,
      // otherwise we should just pass on wrapping it
      //
      // TODO: test case where no callback is supplied
      var isCallback = args.callback && typeof args.callback === 'function'
      // The mysql module has internal retry logic that will call
      // getConnection again with our wrapped callback.
      // We should avoid re-wrapping the callback when possible,
      // although nothing bad happens when we fail this, it just
      // makes stack traces a little better in errors.
      if (!isCallback || !args.callback.__NR_original_callback) {
        var proxiedCallback = tracer.bindFunction(args.callback)
        getConnectionCallback = function getConnectionCallback(err, connection) {
          // we need to patch the connection objects .query method
          wrapQueriable(connection, 'connection')
          proxiedCallback(err, connection)
        }
        // tag so we can avoid re-wrapping
        getConnectionCallback.__NR_original_callback = args.callback
      } else {
        // the connection is already wrapped
        logger.trace('getConnection callback already wrapped')
        getConnectionCallback = args.callback
      }
      args.vargs.push(getConnectionCallback)
      return getConnectionMethod.apply(dbObject, args.vargs)
    }
  }
  // FIXME: need a more general way of differentiating between driver versions
  if (mysql && mysql.createConnection) {
    // congratulations, you have node-mysql 2.0
    shimmer.wrapMethod(mysql, 'mysql.prototype', 'createPoolCluster',
      function cb_wrapMethod(createPoolCluster) {
        // this is generally called outside of a transaction,
        // so we don't need/care about preserving
        // the continuation, but we do need to patch the returned object
        return function not_in_transaction() {
          var poolCluster = createPoolCluster.apply(mysql, arguments)
          shimmer.wrapMethod(poolCluster, 'poolCluster', 'of',
            function cb_wrapMethod(of) {
              return function nrWrappedMethod() {
                var ofCluster = of.apply(poolCluster, arguments)
                shimmer.wrapMethod(ofCluster, 'poolCluster', 'getConnection',
                  function cb_wrapMethod(getConnection) {
                    return getConnectionHandler(ofCluster, getConnection)
                  })
                return ofCluster
              }
            })
          shimmer.wrapMethod(poolCluster, 'poolCluster', 'getConnection',
            function cb_wrapMethod(getConnection) {
              return getConnectionHandler(poolCluster, getConnection)
            })
          return poolCluster
        }
      })
    shimmer.wrapMethod(mysql, 'mysql', 'createPool',
      function cb_wrapMethod(createPool) {
        return function cb_wrapFunction() {
          var pool = createPool.apply(mysql, arguments)
          shimmer.wrapMethod(pool, 'pool', 'getConnection',
            function cb_wrapMethod(getConnection) {
              return getConnectionHandler(pool, getConnection)
            })
          // patch the pools .query method
          wrapQueriable(pool, 'pool', true)
          return pool
        }
      })
    shimmer.wrapMethod(
      mysql,
      'mysql',
      'createConnection',
      function cb_wrapMethod(createConnection) {
        return function wrappedCreateConnection() {
          var connection = createConnection.apply(this, arguments)
          wrapQueriable(connection, 'connection')
          return connection
        }
      }
    )
  } else if (mysql && mysql.Client) {
    // congratulations, you have node-mysql 0.9
    shimmer.wrapMethod(
      mysql && mysql.Client && mysql.Client.prototype,
      'mysql.Client.prototype',
      'query',
      function nrQueryWrapper(original) {
        return tracer.wrapFunction(
          MYSQL.STATEMENT + 'Unknown',
          null,
          original,
          wrapQuery09
        )
      }
    )
  }
  // remembered across queries so a `USE <db>` statement affects later ones
  var databaseName09 = null
  // Argument wrapper for the 0.9 Client#query API: names the segment from
  // the SQL, captures instance attributes, and binds the trailing callback.
  function wrapQuery09(segment, args, bind) {
    var transaction = tracer.getTransaction()
    var sqlString = args[0]
    var ps = parseSql(MYSQL.PREFIX, sqlString)
    transaction.addRecorder(ps.recordMetrics.bind(ps, segment))
    segment.name = MYSQL.STATEMENT + (ps.model || 'unknown') + '/' + ps.operation
    // capture connection info for datastore instance metric
    databaseName09 = databaseName09 || this.database
    if (this.socketPath) {
      // in the unix domain socket case we force the host to be
      // localhost
      segment.captureDBInstanceAttributes(
        agent.config.getHostnameSafe(),
        this.socketPath,
        databaseName09
      )
    } else {
      segment.captureDBInstanceAttributes(
        this.host,
        this.port,
        databaseName09
      )
    }
    databaseName09 = dbutil.extractDatabaseChangeFromUse(sqlString) || databaseName09
    // find and wrap the callback
    if (args.length > 1 && typeof args[args.length - 1] === 'function') {
      args[args.length - 1] = bind(args[args.length - 1])
    }
    // FIXME: need to grab error events as well, as they're also emitted on
    // the client
    return args
  }
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 | 1 1 1 1 1 1 | 'use strict'
var shimmer = require('../shimmer')
var record = require('../metrics/recorders/cassandra.js')
var CASSANDRA = require('../metrics/names').CASSANDRA
// Client methods that execute CQL and accept a trailing callback; each gets
// wrapped in a traced segment by `initialize` below.
var INSTRUMENTED_OPERATIONS = [
  'execute',
  'executeAsPrepared',
  'executeBatch'
]
module.exports = function initialize(agent, cassandracql) {
var tracer = agent.tracer
INSTRUMENTED_OPERATIONS.forEach(function cb_forEach(operation) {
shimmer.wrapMethod(
cassandracql && cassandracql.Client && cassandracql.Client.prototype,
'node-cassandra-cql.Client.prototype',
operation,
function wrapOperation(original) {
return tracer.wrapFunction(
CASSANDRA.OPERATION + operation,
record,
original,
wrapper
)
}
)
function wrapper(segment, args, bind) {
var position = args.length - 1
var last = args[position]
// capture connection info for datastore instance metric
segment.port = this.port
segment.host = this.host
if (typeof last === 'function') {
args[position] = bind(last, true, true)
} else if (Array.isArray(last) && typeof last[last.length - 1] === 'function') {
last[last.length - 1] = tracer.bindFunction(
bind(last[last.length - 1], true, true)
)
} else { // let's shove a callback in there for fun
args.push(bind(null, null))
}
return args
}
})
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../logger').child({component: 'oracle'})
var shimmer = require('../shimmer')
var parseSql = require('../db/parse-sql')
var ORACLE = require('../metrics/names').ORACLE
/**
 * Instruments the oracle driver. The connection prototype is only reachable
 * through a live connection, so the connect methods are wrapped first and
 * the prototype is instrumented lazily on the first successful connect.
 *
 * @param {Agent} agent - The New Relic agent; supplies the tracer.
 * @param {object} oracle - The loaded oracle module.
 */
module.exports = function initialize(agent, oracle) {
  var tracer = agent.tracer
  // guards against instrumenting the connection prototype more than once
  var wrapped = false
  logger.trace('wrapping oracle.connect and oracle.connectSync')
  shimmer.wrapMethod(oracle, 'Oracle', 'connect', function cb_wrapMethod(connect) {
    return function wrappedConnect(connectData, cb) {
      return connect.call(
        this,
        connectData,
        tracer.bindFunction(wrapConnection)
      )
      function wrapConnection(err, connection) {
        // only instrument on success; the user callback runs either way
        if (!err) ensureConnectionWrapped(connection)
        return cb(err, connection)
      }
    }
  })
  shimmer.wrapMethod(oracle, 'Oracle', 'connectSync', function wrapSyncConnect(connect) {
    return function wrappedSyncConnect() {
      var connection = connect.apply(this, arguments)
      ensureConnectionWrapped(connection)
      return connection
    }
  })
  // Instruments the connection prototype exactly once, then swaps the
  // bootstrap connect wrappers above for a lighter-weight one.
  function ensureConnectionWrapped(connection) {
    // return early in case called from an async connect after wrapping
    if (wrapped) return
    logger.trace('wrapping oracle connection prototype')
    wrapped = true
    // the bootstrap wrappers are no longer needed; restore the originals
    // before re-wrapping connect without the prototype hook
    oracle.connectSync.__NR_unwrap()
    oracle.connect.__NR_unwrap()
    shimmer.wrapMethod(oracle, 'Oracle', 'connect', function wrapMethod(connect) {
      return tracer.wrapFunctionNoSegment(connect, 'connect')
    })
    var proto = Object.getPrototypeOf(connection)
    wrapConnectionExecute(proto, tracer)
    wrapConnectionPrepare(proto, tracer)
    // readers are created per-SQL, so each one is wrapped as it is built
    shimmer.wrapMethod(proto, 'Oracle', 'reader', function wrapMethod(createReader) {
      return function wrappedConnect(sql) {
        var reader = createReader.apply(this, arguments)
        wrapReader(reader, tracer, sql)
        return reader
      }
    })
  }
}
/**
 * Wraps `connection.execute` so every call runs inside a traced statement
 * segment named from the parsed SQL.
 *
 * @param {object} connection - The oracle connection prototype.
 * @param {Tracer} tracer - The agent's tracer.
 */
function wrapConnectionExecute(connection, tracer) {
  // Argument preparer: names the segment from the SQL, registers a metrics
  // recorder, and binds the callback (always the third argument).
  function nameAndBind(segment, args, bind) {
    var parsed = parseSql(ORACLE.PREFIX, args[0])
    segment.name = ORACLE.STATEMENT + parsed.model + '/Connection.execute/' + parsed.operation
    logger.trace(
      'capturing oracle query. model: %s, Operation: %s',
      parsed.model,
      parsed.operation
    )
    segment.transaction.addRecorder(parsed.recordMetrics.bind(parsed))
    args[2] = bind(args[2])
    return args
  }
  shimmer.wrapMethod(connection, 'Oracle.connection', 'execute', function wrapExecute(execute) {
    return tracer.wrapFunction(ORACLE.STATEMENT + 'other/', null, execute, nameAndBind)
  })
}
/**
 * Wraps a reader's row-fetching methods so each fetch is traced under a
 * segment named from the statement the reader was created with.
 *
 * @param {object} reader - The oracle reader instance.
 * @param {Tracer} tracer - The agent's tracer.
 * @param {string} sql - The SQL text used to create the reader.
 */
function wrapReader(reader, tracer, sql) {
  var parsed = parseSql(ORACLE.PREFIX, sql)
  // both methods get identical wrapping; only the metric label differs
  wrapFetch('nextRow', 'Reader.nextRow')
  wrapFetch('nextRows', 'Reader.nextRows')
  function wrapFetch(method, label) {
    shimmer.wrapMethod(reader, 'Oracle.Reader', method, function wrapMethod(original) {
      return tracer.wrapFunctionLast(
        ORACLE.STATEMENT + parsed.model + '/' + label + '/' + parsed.operation,
        parsed.recordMetrics.bind(parsed),
        original
      )
    })
  }
}
/**
 * Wraps `connection.prepare` so that every prepared statement's `execute`
 * runs inside a traced segment named from the SQL given at prepare time.
 *
 * @param {object} connection - The oracle connection prototype.
 * @param {Tracer} tracer - The agent's tracer.
 */
function wrapConnectionPrepare(connection, tracer) {
  shimmer.wrapMethod(connection, 'Oracle.connection', 'prepare', function wrapPrepare(prepare) {
    return function wrappedPrepare(sql) {
      // parse once; the same statement metadata names every execute
      var parsed = parseSql(ORACLE.PREFIX, sql)
      var statement = prepare.apply(this, arguments)
      shimmer.wrapMethod(statement, 'Oracle', 'execute', function wrapExecute(execute) {
        return tracer.wrapFunctionLast(
          ORACLE.STATEMENT + parsed.model + '/Statement.execute/' + parsed.operation,
          parsed.recordMetrics.bind(parsed),
          execute
        )
      })
      return statement
    }
  })
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var shimmer = require('../shimmer')
var logger = require('../logger').child({component: 'pg'})
var parseSql = require('../db/parse-sql')
var POSTGRES = require('../metrics/names').POSTGRES
var util = require('util')
// Adds a segment
// The `config` argument is either a statement string template or a pg statement
// config object with a `text` property holding the statement string template.
// Adds a segment
// Names and configures a segment for a postgres query. The `config` argument
// is either a statement string template or a pg statement config object with
// a `text` property holding the statement string template.
function initializeSegment(tracer, segment, client, config) {
  // default is unparseable on purpose; the naming below maps it to 'other'
  var statement = 'Other'
  if (config && (typeof config === 'string' || config instanceof String)) {
    statement = config
  } else if (config && config.text) {
    statement = config.text
  }
  var parsed = parseSql(POSTGRES.PREFIX, statement)
  segment.name = POSTGRES.STATEMENT + (parsed.model || 'other') + '/' + parsed.operation
  segment.captureDBInstanceAttributes(client.host, client.port, client.database)
  logger.trace(
    'capturing postgresql query. model: %s, operation: %s',
    parsed.model,
    parsed.operation
  )
  tracer.getTransaction().addRecorder(parsed.recordMetrics.bind(parsed, segment))
}
/**
 * Instruments the pg driver: the pure-JS client (by wrapping
 * Client.prototype.query) and the native client (by replacing the lazy
 * `native` getter so the native Client/Pool get wrapped on first access).
 *
 * @param {Agent} agent - The New Relic agent; supplies the tracer.
 * @param {object} pgsql - The loaded pg module; may be undefined.
 */
module.exports = function initialize(agent, pgsql) {
  if (!pgsql) return
  var tracer = agent.tracer
  // allows for native wrapping to not happen if not necessary
  // when env var is true
  if (process.env.NODE_PG_FORCE_NATIVE) {
    return instrumentPGNative('pg', pgsql)
  }
  // The pg module defines "native" getter which sets up the native client lazily
  // (only when called). We replace the getter, so that we can instrument the native
  // client. The original getter replaces itself with the instance of the native
  // client, so only instrument if the getter exists (otherwise assume already
  // instrumented).
  var origGetter = pgsql.__lookupGetter__('native')
  if (origGetter) {
    delete pgsql.native
    pgsql.__defineGetter__('native', function getNative() {
      var temp = origGetter()
      instrumentPGNative('pg.native', temp)
      return temp
    })
  }
  // wrapping for native
  function instrumentPGNative(eng, pg) {
    shimmer.wrapMethod(pg, 'pg', 'Client', wrapClient)
    shimmer.wrapMethod(pg.pools, 'pg.pools', 'Client', wrapClient)
    shimmer.wrapMethod(pg, 'pg', 'Pool', wrapPool)
    // `new Cls(...args)` via bind/apply; arguments[0] is Cls itself, which
    // becomes the (ignored) bound `this`
    function newApply(Cls) {
      return new (Cls.bind.apply(Cls, arguments))()
    }
    // Subclasses Pool so construction can swap in the wrapped Client.
    function wrapPool(Pool) {
      if (shimmer.isWrapped(Pool)) {
        return Pool
      }
      util.inherits(WrappedPool, Pool)
      WrappedPool.__NR_original = Pool
      return WrappedPool
      /* eslint-disable no-unused-vars */
      function WrappedPool(options, Client) {
        /* eslint-enable no-unused-vars */
        // support invocation without `new`
        if (!(this instanceof WrappedPool)) {
          return newApply(WrappedPool, arguments)
        }
        Pool.apply(this, arguments)
        this.Client = wrapClient(this.Client)
      }
    }
    // Subclasses Client so each instance gets its connect/query wrapped.
    function wrapClient(Client) {
      if (shimmer.isWrapped(Client)) {
        return Client
      }
      util.inherits(WrappedClient, Client)
      // carry static properties over to the wrapper constructor
      Object.keys(Client).forEach(function forEachClientKey(k) {
        WrappedClient[k] = Client[k]
      })
      WrappedClient.__NR_original = Client
      return WrappedClient
      // -------------------------------------------------------------------- //
      /* eslint-disable no-unused-vars */
      function WrappedClient(options) {
        /* eslint-enable no-unused-vars */
        // NOTE: This is an instance of PG's `Client` class, _not_ its
        // `Connection` class. This is an important distinction as the
        // latter does not have the host/port/database meta data.
        // Apply the constructor. JavaScript really needs a better way to do this.
        // This logic is the same as `newApply`, however for some reason `newApply`
        // does not work on Node v0.8 or v0.10. For `WrappedPool` this doesn't
        // matter since the versions of PG it is in don't support those ancient
        // versions of Node either, but here we must do it ourselves.
        var args = tracer.slice(arguments)
        args.unshift(Client) // `unshift` === `push_front`
        var client = new (Client.bind.apply(Client, args))()
        // Wrap the methods we care about.
        shimmer.wrapMethod(client, 'Client', 'connect', wrapConnect)
        shimmer.wrapMethod(client, 'Client', 'query', wrapNativeQuery)
        return client
      }
      // binds the connect callback into the active trace context
      function wrapConnect(connect) {
        return function wrappedConnect(callback) {
          if (typeof callback === 'function') {
            callback = tracer.bindFunction(callback)
          }
          return connect.call(this, callback)
        }
      }
      function wrapNativeQuery(original) {
        return tracer.wrapFunction(
          POSTGRES.STATEMENT + 'Unknown',
          null,
          original,
          nativeQueryWrapper,
          nativeResponseWrapper
        )
      }
      // names the segment from the SQL and binds the trailing callback
      function nativeQueryWrapper(segment, args, bindCallback) {
        initializeSegment(tracer, segment, this, args[0])
        var pos = args.length - 1
        var last = args[pos]
        // Proxy callback in case they start new segments
        args[pos] = bindCallback(last)
        return args
      }
      // binds the returned query emitter's handlers and touches the segment
      // when the query finishes
      function nativeResponseWrapper(segment, result, bindCallback) {
        // Wrap end and error events too, in case they start new segments
        // within them. Use end and error events to end segments.
        result.on('error', end)
        result.on('end', end)
        function end() {
          segment.touch()
          logger.trace(
            'postgres command trace segment ended by event for transaction %s.',
            segment.transaction.id
          )
        }
        // TODO: Maybe .on and .addListener shouldn't be different
        // Proxy events too, in case they start new segments within handlers
        shimmer.wrapMethod(result, 'query.on', 'on', function queryOnWrapper(on) {
          return function queryOnWrapped() {
            if (arguments[1]) {
              if (arguments[0] === 'end' || arguments[0] === 'error') {
                arguments[1] = bindCallback(arguments[1])
              } else {
                arguments[1] = tracer.bindFunction(arguments[1], segment, true)
              }
            }
            return on.apply(this, arguments)
          }
        })
        shimmer.wrapMethod(
          result,
          'query.addListener',
          'addListener',
          queryAddListenerWrapper
        )
        function queryAddListenerWrapper(addL) {
          return function queryAddListenerWrapped() {
            if (arguments[1]) {
              if (arguments[0] === 'end' || arguments[0] === 'error') {
                arguments[1] = bindCallback(arguments[1])
              } else {
                arguments[1] = tracer.bindFunction(arguments[1], segment, true)
              }
            }
            // NOTE(review): addL's return value is dropped here, unlike the
            // `on` wrapper above -- verify callers never chain on addListener
            addL.apply(this, arguments)
          }
        }
        return result
      }
    }
  }
  // wrapping for JS
  shimmer.wrapMethod(
    pgsql && pgsql.Client && pgsql.Client.prototype,
    'pg.Client.prototype',
    'query',
    wrapQuery
  )
  function wrapQuery(original) {
    return tracer.wrapFunction(
      POSTGRES.STATEMENT + 'Unknown',
      null,
      original,
      queryWrapper,
      responseWrapper
    )
  }
  // names the segment and binds whichever callback form was supplied
  function queryWrapper(segment, args, bindCallback) {
    var position = args.length - 1
    var last = args[position]
    initializeSegment(tracer, segment, this, args[0])
    // Proxy callbacks in case they start new segments
    if (typeof last === 'function') {
      args[position] = bindCallback(last, true, true)
    } else if (Array.isArray(last) && typeof last[last.length - 1] === 'function') {
      // callback rides at the end of a values array
      var callback = last[last.length - 1]
      last[last.length - 1] = bindCallback(callback)
    }
    return args
  }
  // binds the returned query emitter's handlers and ends the segment when
  // the query finishes
  function responseWrapper(segment, query, bindCallback) {
    // Use end and error events to end segments
    query.on('error', end)
    query.on('end', end)
    function end() {
      // NOTE(review): this path calls segment.end() where the native path
      // calls segment.touch() -- confirm the difference is intentional
      segment.end()
      logger.trace(
        'postgres command trace segment ended by event for transaction %s.',
        segment.transaction.id
      )
    }
    // Proxy events too, in case they start new segments within handlers
    shimmer.wrapMethod(query, 'query.on', 'on', function queryOnWrapper(on) {
      return function queryOnWrapped() {
        if (arguments[1]) {
          if (arguments[0] === 'end' || arguments[0] === 'error') {
            arguments[1] = bindCallback(arguments[1])
          } else {
            arguments[1] = tracer.bindFunction(arguments[1], segment, true)
          }
        }
        return on.apply(this, arguments)
      }
    })
    shimmer.wrapMethod(query, 'query.addListener', 'addListener', addListenerWrapper)
    function addListenerWrapper(addL) {
      return function wrappedAddListener() {
        if (arguments[1]) {
          if (arguments[0] === 'end' || arguments[0] === 'error') {
            arguments[1] = bindCallback(arguments[1])
          } else {
            arguments[1] = tracer.bindFunction(arguments[1], segment, true)
          }
        }
        addL.apply(this, arguments)
      }
    }
    return query
  }
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../logger')
var util = require('util')
var shimmer = require('../shimmer')
/**
* @namespace Library.Spec
*
* @property {string} name
* The name of this promise library.
*
* @property {?string} constructor
* Optional. The name of the property that is the Promise constructor. Default
* is to use the library itself as the Promise constructor.
*
* @property {?bool} executor
* Optional. If true, the Promise constructor itself will be wrapped for the
* executor. If false then `_proto`, `_static`, or `_library` must have an
* `executor` field whose value is the name of the executor function. Default
* is false.
*
* @property {Library.Spec.Mapping} $proto
* The mapping for Promise instance method concepts (i.e. `then`). These are
* mapped on the Promise class' prototype.
*
* @property {Library.Spec.Mapping} $static
* The mapping for Promise static method concepts (i.e. `all`, `race`). These
* are mapped on the Promise class itself.
*
* @property {?Library.Spec.Mapping} $library
* The mapping for library-level static method concepts (i.e. `fcall`, `when`).
* These are mapped on the library containing the Promise class. NOTE: in most
* promise implementations, the Promise class is itself the library thus this
* property is unnecessary.
*/
/**
* @namespace Library.Spec.Mapping
*
* A mapping of promise concepts (i.e. `then`) to this library's implementation
* name(s) (i.e. `["then", "chain"]`). Each value can by either a single string
* or an array of strings if the concept exists under multiple keys. If any
* given concept doesn't exist in this library, it is simply skipped.
*
* @property {array} $copy
* An array of properties or methods to just directly copy without wrapping.
* This field only matters when `Library.Spec.executor` is `true`.
*
* @property {string|array} executor
*
*
* @property {string|array} then
*
*
* @property {string|array} all
*
*
* @property {string|array} race
*
*
* @property {string|array} resolve
* Indicates methods to wrap which are resolve factories. This method only
* requires wrapping if the library doesn't use an executor internally to
* implement it.
*
* @property {string|array} reject
* Indicates methods to wrap which are reject factories. Like `resolve`, this
* method only requires wrapping if the library doesn't use an executor
* internally to implement it.
*/
/**
* Instruments a promise library.
*
* @param {Agent} agent - The New Relic APM agent.
* @param {function} library - The promise library.
* @param {?Library.Spec} spec - Spec for this promise library mapping.
*/
module.exports = function initialize(agent, library, spec) {
  // Wrap library-level static methods (e.g. `when()` on the when library).
  wrapStaticMethods(library, spec.name, spec.$library)
  // Wrap prototype methods of the Promise class the spec names.
  // NOTE(review): the Library.Spec docs above say `constructor` is optional,
  // but `library[undefined]` would break `wrapPrototype` — confirm all specs
  // actually set it.
  var Promise = library[spec.constructor]
  wrapPrototype(Promise.prototype)
  wrapStaticMethods(Promise, spec.constructor, spec.$static)
  // See if we are wrapping the class itself (executor instrumentation).
  if (spec.executor) {
    shimmer.wrapMethod(library, spec.name, spec.constructor, wrapPromise)
  }
/**
* Wraps the Promise constructor as the executor.
*/
function wrapPromise() {
  // Copy all unwrapped properties over, so statics the spec lists remain
  // reachable on the replacement constructor.
  if (spec.$static && spec.$static.$copy) {
    spec.$static.$copy.forEach(function copyKeys(key) {
      if (!wrappedPromise[key]) {
        wrappedPromise[key] = Promise[key]
      }
    })
  }
  // Mutable hooks are proxied with live getter/setters so reads and writes
  // on the wrapper always hit the original class.
  var passThrough = spec.$static && spec.$static.$passThrough
  if (passThrough) {
    passThrough.forEach(function assignProxy(proxyProp) {
      if (!wrappedPromise.hasOwnProperty(proxyProp)) {
        Object.defineProperty(wrappedPromise, proxyProp, {
          enumerable: true,
          configurable: true,
          get: function getOriginal() {
            return Promise[proxyProp]
          },
          set: function setOriginal(newValue) {
            Promise[proxyProp] = newValue
          }
        })
      }
    })
  }
  // Inherit to pass `instanceof` checks.
  util.inherits(wrappedPromise, Promise)
  // Make the wrapper.
  return wrappedPromise
  function wrappedPromise(executor) {
    if (!(this instanceof wrappedPromise)) {
      // Called without `new`: defer to the original constructor's behavior.
      return Promise(executor) // eslint-disable-line new-cap
    }
    var promise = null
    if (typeof executor !== 'function' || arguments.length !== 1) {
      // we are expecting one function argument for executor, anything else is
      // non-standard, so not attempting to wrap
      var cnstrctArgs = agent.tracer.slice(arguments)
      cnstrctArgs.unshift(Promise) // `unshift` === `push_front`
      promise = new (Promise.bind.apply(Promise, cnstrctArgs))()
    } else {
      var segmentName = 'Promise ' + (executor.name || '<anonymous>')
      var context = {
        promise: null,
        self: null,
        args: null
      }
      // Hand the real constructor a context-exporting executor; the user's
      // executor runs below with the captured (wrapped) resolve/reject.
      promise = new Promise(wrapExecutorContext(context))
      context.promise = promise
      _setInternalProperty(promise, '__NR_segment', _createSegment(segmentName))
      try {
        // Must run after promise is defined so that `__NR_wrapper` can be set.
        executor.apply(context.self, context.args)
      } catch (e) {
        // A throwing executor rejects, via the captured reject wrapper.
        context.args[1](e)
      }
    }
    // The Promise must be created using the "real" Promise constructor (using
    // normal Promise.apply(this) method does not work). But the prototype
    // chain must include the wrappedPromise.prototype, V8's promise
    // implementation uses promise.constructor to create new Promises for
    // calls to `then`, `chain` and `catch` which allows these Promises to
    // also be instrumented.
    promise.__proto__ = wrappedPromise.prototype // eslint-disable-line no-proto
    return promise
  }
}
/**
 * Wraps the chain-extending methods on a Promise prototype according to the
 * spec's `$proto` mapping. No-op when the spec has no prototype mapping.
 *
 * @param {object} PromiseProto - The prototype object to wrap.
 * @param {?string} name - Display name; defaults to `<constructor>.prototype`.
 */
function wrapPrototype(PromiseProto, name) {
  var protoSpec = spec.$proto
  if (!protoSpec) {
    return // nothing mapped for this library's prototype
  }
  var protoName = name || (spec.constructor + '.prototype')
  _safeWrap(PromiseProto, protoName, protoSpec.executor, wrapExecutorCaller)
  _safeWrap(PromiseProto, protoName, protoSpec.then, wrapThen)
  _safeWrap(PromiseProto, protoName, protoSpec.catch, wrapCatch)
}
/**
 * Wraps the promise-factory ("cast") statics listed by a static spec.
 *
 * @param {object} lib - Object owning the static methods.
 * @param {string} name - Display name for the wrapped object.
 * @param {?Library.Spec.Mapping} staticSpec - Mapping; skipped when absent.
 */
function wrapStaticMethods(lib, name, staticSpec) {
  if (staticSpec) {
    _safeWrap(lib, name, staticSpec.cast, wrapCast)
  }
}
/**
 * Wraps a prototype-level executor-invoking method so the executor receives
 * context-tracking resolve/reject functions and the promise gets a segment.
 *
 * @param {function} caller - The original executor-calling method.
 * @return {function} The wrapped method.
 */
function wrapExecutorCaller(caller) {
  return function wrappedExecutorCaller(executor) {
    if (!(this instanceof Promise)) {
      return caller.apply(this, arguments)
    }
    var context = {
      promise: this,
      self: null,
      args: null
    }
    if (!this.__NR_segment) {
      // BUG FIX: parenthesize the fallback. This previously evaluated as
      // `('Promise ' + executor.name) || '<anonymous>'`, so the string was
      // always truthy and anonymous executors were named
      // 'Promise undefined' / 'Promise ' instead of 'Promise <anonymous>'.
      // (Matches the correct form used in wrappedPromise.)
      var segmentName = 'Promise ' + (executor.name || '<anonymous>')
      _setInternalProperty(this, '__NR_segment', _createSegment(segmentName))
    }
    var args = [].slice.call(arguments)
    args[0] = wrapExecutorContext(context, this.__NR_segment)
    var ret = caller.apply(this, args)
    // Bluebird catches executor errors and auto-rejects when it catches them,
    // thus we need to do so as well.
    //
    // When adding new libraries, make sure to check that they behave the same
    // way. We may need to enhance the promise spec to handle this variance.
    try {
      executor.apply(context.self, context.args)
    } catch (e) {
      context.args[1](e)
    }
    return ret
  }
}
/**
* Creates a function which will export the context and arguments of its
* execution.
*
* @param {object} context - The object to export the execution context with.
*
* @return {function} A function which, when executed, will add its context
* and arguments to the `context` parameter.
*/
function wrapExecutorContext(context, segment) {
  // This replaces the user's executor when handed to the real constructor:
  // it stashes `this` and the native resolve/reject into `context`, swapping
  // in wrappers that link the promise chain to the segment before settling.
  return function contextExporter(resolve, reject) {
    segment = segment || agent.tracer.segment
    context.self = this
    context.args = [].slice.call(arguments)
    context.args[0] = wrappedResolve
    context.args[1] = wrappedReject
    // These wrappers create a function that can be passed a function and an
    // argument to call as a continuation from the resolve or reject.
    function wrappedResolve(val) {
      var promise = context.promise
      if (promise) {
        // Resolution may stop linking at the next resolve handler (3rd arg
        // true); refresh the segment's timer if one is attached.
        linkChain(promise, promise.__NR_segment || segment, true)
        if (promise.__NR_segment) {
          promise.__NR_segment.touch()
        }
      }
      return resolve(val)
    }
    function wrappedReject(val) {
      var promise = context.promise
      if (promise) {
        // Rejections must link the whole chain (3rd arg false) — rejection
        // can long-jump past resolve handlers.
        linkChain(promise, promise.__NR_segment || segment, false)
        if (promise.__NR_segment) {
          promise.__NR_segment.touch()
        }
      }
      return reject(val)
    }
  }
}
/**
* Brings the transaction through a promise to `then`ed continuations.
*
* @param {Promise} ctx The `this` argument for `fn`.
* @param {Function} fn The handler function
* @param {string} name The name function that added this link (i.e. then).
* @param {Promise} next Promise returned from calling `then`
* @param {Array} args Arguments passed into the `then` handler.
* @return {*} The value returned from the `then`ed function.
*/
function linkTransaction(ctx, fn, name, next, args) {
  if (!next) {
    // Nothing downstream to propagate into; just invoke the handler.
    return fn.apply(ctx, args)
  }
  // next needs to have a wrapper function even if the callback throws.
  try {
    if (!next.__NR_segment) {
      var segmentName = 'Promise#' + name + ' ' + (fn.name || '<anonymous>')
      _setInternalProperty(next, '__NR_segment', _createSegment(segmentName))
    }
    // NOTE: `segment` and `result` are var-hoisted, so they remain visible
    // to the finally block even when the bound call below throws
    // (`result` is then undefined).
    var segment = next.__NR_segment
    var result = agent.tracer.bindFunction(fn, segment, true).apply(ctx, args)
  } finally {
    if (result instanceof Promise && result !== next) {
      // The handler returned its own promise: link `next` through a proxy
      // that defers to that promise's wrapper once it has one.
      linkChain(next, segment, null, function proxyWrapper() {
        if (segment) {
          segment.touch()
        }
        var link = result.__NR_wrapper
        if (!link) {
          link = agent.tracer.bindFunction(linkTransaction, segment, true)
        }
        return link.apply(this, arguments)
      })
    } else {
      // If we have a result, we know we didn't reject and can bound linking
      // to just the next resolve handler.
      // resolved : unknown
      linkChain(next, segment, result !== undefined ? true : null)
    }
  }
  return result
}
/**
* If the promise isn't already bound, this will bind it to the given segment.
*
* @param {Promise} promise - The promise to link with the segment.
* @param {TraceSegment} segment - The segment to link the promise with.
*/
function bindLink(promise, segment) {
  // Already linked: never overwrite an existing wrapper.
  if (promise.__NR_wrapper) {
    return
  }
  var wrapper = agent.tracer.bindFunction(linkTransaction, segment, true)
  _setInternalProperty(promise, '__NR_wrapper', wrapper)
}
/**
* Walks the promise chain, linking each one to the given segment.
*
* @param {Promise} promise
* The first promise in the chain to link with the segment.
*
* @param {TraceSegment} segment
* The segment to link the chain with.
*
* @param {?bool} [resolved]
* Flag indicating if we only need to wrap down to the next resolve handler.
* If true, linking will stop after the first resolve handler is found.
*
* @param {Function} [wrapper]
* The wrapper to use for the linking. If not provided then `linkTransaction`
* will be used as the wrapper.
*/
function linkChain(promise, segment, resolved, wrapper) {
  var linkWrapper =
    wrapper || agent.tracer.bindFunction(linkTransaction, segment, true)
  // Walk the `__NR_nextPromise` chain, stamping each node with the wrapper.
  for (var node = promise; node instanceof Promise; node = node.__NR_nextPromise) {
    _setInternalProperty(node, '__NR_wrapper', linkWrapper)
    // If we resolved and this node holds the resolve handler, stop linking.
    if (resolved && node.__NR_resolveHandler) {
      break
    }
    // We cannot preemptively stop for reject: some promise libraries support
    // long jumps on rejection according to error class (thanks bluebird!).
    // Guard against a self-referencing chain to avoid an infinite loop.
    if (node.__NR_nextPromise === node) {
      break
    }
  }
}
/**
* Creates a wrapper for `Promise#then` that extends the transaction context.
*
* @return {function} A wrapped version of `Promise#then`.
*/
function wrapThen(then, name) {
  // `then`-style methods wrap every function argument (fulfill and reject).
  var wrapAllHandlers = true
  return _wrapThen(then, name, wrapAllHandlers)
}
/**
* Creates a wrapper for `Promise#catch` that extends the transaction context.
*
* @return {function} A wrapped version of `Promise#catch`.
*/
function wrapCatch(cach, name) {
  // `catch`-style methods only wrap the final (rejection) handler.
  var wrapAllHandlers = false
  return _wrapThen(cach, name, wrapAllHandlers)
}
/**
* Creates a wrapper for promise chain extending methods.
*
* @param {function} then
* The function we are to wrap as a chain extender.
*
* @param {bool} useAllParams
* When true, all parameters which are functions will be wrapped. Otherwise,
* only the last parameter will be wrapped.
*
* @return {function} A wrapped version of the function.
*/
function _wrapThen(then, name, useAllParams) {
  // Don't wrap non-functions, and don't double-wrap.
  if (!(then instanceof Function) || then.name === '__NR_wrappedThen') {
    return then
  }
  return function __NR_wrappedThen() {
    if (!(this instanceof Promise)) {
      // Not an instrumented promise; stay out of the way.
      return then.apply(this, arguments)
    }
    var thenSegment = agent.tracer.getSegment()
    var promise = this
    // Wrap up the arguments and execute the real then.
    var hasResolve = false
    var args = [].map.call(arguments, wrapHandler)
    var next = then.apply(this, args)
    // Make sure we got a promise and then return it.
    if (next instanceof Promise && next !== promise) {
      // Record whether a fulfill handler was wrapped and link this promise
      // forward so linkChain can walk the chain later.
      _setInternalProperty(promise, '__NR_resolveHandler', hasResolve)
      _setInternalProperty(promise, '__NR_nextPromise', next)
    }
    return next
    // Wrap callbacks (success, error) so that the callbacks will be called as
    // a continuations of the accept or reject call using the __asl__wrapper
    // created above.
    function wrapHandler(fn, i, arr) {
      if (
        !(fn instanceof Function) || // Not a function
        fn.name === '__NR_wrappedThenHandler' || // Already wrapped
        (!useAllParams && i !== (arr.length - 1)) // Don't want all and not last
      ) {
        return fn
      }
      // Position 0 is the fulfill handler by convention.
      hasResolve = (hasResolve || (i === 0))
      return function __NR_wrappedThenHandler() {
        // Even though success/error handlers should have just one argument
        // (value or error), internal implementations could be passing in more
        // arguments.
        if (!promise.__NR_wrapper) {
          // The currently running segment is the least likely to be the
          // correct one when working with Bluebird due to the way it queues
          // all promise resolutions and executes them all at once.
          //
          // An option may be to prioritize the current segment, but compare
          // its transaction ID to the transaction ID of the `thenSegment`. If
          // they are the same, use the current segment, otherwise use the
          // `thenSegment`. I'd prefer to wait for the simpler method to be
          // proven invalid.
          var segment =
            promise.__NR_segment || thenSegment || agent.tracer.getSegment()
          if (segment) {
            bindLink(promise, segment)
          } else {
            // No segment at all: fall back to calling the handler directly.
            return fn.apply(this, arguments)
          }
        }
        // invoke linkTransaction()
        // NOTE(review): linkTransaction declares only five parameters; the
        // trailing `promise` argument appears unused there — confirm before
        // removing it.
        return promise.__NR_wrapper(this, fn, name, next, arguments, promise)
      }
    }
  }
}
/**
* Creates a wrapper around the static `Promise` factory method.
*/
function wrapCast(cast, name) {
  // Only wrap real functions, and only once.
  var wrappable = cast instanceof Function && cast.name !== '__NR_wrappedCast'
  if (!wrappable) {
    return cast
  }
  var segmentName = 'Promise.' + name
  return function __NR_wrappedCast() {
    // Give the factory's promise its own segment (when segments are enabled)
    // and link the resulting promise to it.
    var castSegment = _createSegment(segmentName)
    var prom = cast.apply(this, arguments)
    if (castSegment) {
      bindLink(prom, castSegment)
    }
    return prom
  }
}
/**
 * Creates a named trace segment when the `promise_segments` feature flag is
 * on; otherwise reuses the given parent (or the tracer's current segment).
 *
 * @param {string} name - Name for the new segment.
 * @param {?TraceSegment} parent - Optional parent segment.
 * @return {?TraceSegment} The created or reused segment.
 */
function _createSegment(name, parent) {
  if (agent.config.feature_flag.promise_segments === true) {
    return agent.tracer.createSegment(name, null, parent)
  }
  return parent || agent.tracer.getSegment()
}
}
/**
* Performs a `wrapMethod` if and only if `methods` is truthy and has a length
* greater than zero.
*
* @param {object} obj - The source of the methods to wrap.
* @param {string} name - The name of this source.
* @param {string|array} methods - The names of the methods to wrap.
* @param {function} wrapper - The function which wraps the methods.
*/
function _safeWrap(obj, name, methods, wrapper) {
  var hasMethods = Boolean(methods && methods.length)
  // Skip entirely when the spec lists nothing for this concept.
  if (hasMethods) {
    shimmer.wrapMethod(obj, name, methods, wrapper)
  }
}
/**
 * Sets a non-enumerable, writable property on an object, falling back to a
 * plain assignment when the property already exists. Failures are logged,
 * never thrown.
 *
 * @param {object} obj - Target object (returned unchanged on bad input).
 * @param {string} name - Property name.
 * @param {*} val - Value to store.
 * @return {object} The same `obj` that was passed in.
 */
function _setInternalProperty(obj, name, val) {
  if (!obj || !name) {
    logger.debug('Not setting property; object or name is missing.')
    return obj
  }
  try {
    if (obj.hasOwnProperty(name)) {
      // Already defined (possibly by us): a straight write keeps its
      // existing descriptor.
      obj[name] = val
    } else {
      Object.defineProperty(obj, name, {
        enumerable: false,
        writable: true,
        value: val
      })
    }
  } catch (err) {
    logger.debug({err: err}, 'Failed to set property "%s" to %j', name, val)
  }
  return obj
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 | 1 1 1 1 | 'use strict'
var wrap = require('../shimmer').wrapMethod
module.exports = initialize
/**
 * Instruments Q's scheduling helpers (`Q.nextTick` and its `runAfter`
 * sub-function) as pass-throughs that carry context without creating
 * segments.
 *
 * @param {Agent} agent - The New Relic APM agent.
 * @param {object} Q - The loaded Q module.
 */
function initialize(agent, Q) {
  function wrapUninstrumented(original, method) {
    return agent.tracer.wrapFunctionFirstNoSegment(original, method)
  }
  if (!Q.nextTick) {
    return
  }
  // The wrap() call replaces Q.nextTick wholesale, wiping its `runAfter`
  // sub-function — stash a reference first so it can be restored and wrapped.
  var savedRunAfter = Q.nextTick.runAfter
  wrap(Q, 'Q', 'nextTick', wrapUninstrumented)
  if (savedRunAfter) {
    Q.nextTick.runAfter = savedRunAfter
    wrap(Q.nextTick, 'Q.nextTick', 'runAfter', wrapUninstrumented)
  }
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 | 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../logger').child({component: 'redis'})
var stringifySync = require('../util/safe-json').stringifySync
var shimmer = require('../shimmer')
var urltils = require('../util/urltils.js')
var recordRedis = require('../metrics/recorders/redis.js')
var REDIS = require('../metrics/names').REDIS
/**
 * Instruments the `redis` driver. Clients exposing `internal_send_command`
 * funnel every command through it and get that method wrapped; otherwise the
 * legacy `send_command` is wrapped. Either way each command is traced as a
 * datastore segment named after the command.
 *
 * @param {Agent} agent - The New Relic APM agent.
 * @param {object} redis - The loaded `redis` module.
 */
module.exports = function initialize(agent, redis) {
  var tracer = agent.tracer
  var redisPrototype = redis && redis.RedisClient && redis.RedisClient.prototype
  if (redisPrototype) {
    if (redisPrototype.internal_send_command) {
      shimmer.wrapMethod(
        redisPrototype,
        'redis.RedisClient.prototype',
        'internal_send_command',
        function wrapSendCommand(original) {
          return tracer.wrapFunction(
            REDIS.OPERATION + 'Unknown', // renamed once the command is known
            recordRedis,
            original,
            internalSendCommandWrapper
          )
        }
      )
    } else {
      shimmer.wrapMethod(
        redisPrototype,
        'redis.RedisClient.prototype',
        'send_command',
        function wrapSendCommand(original) {
          return tracer.wrapFunction(
            REDIS.OPERATION + 'Unknown', // renamed once the command is known
            recordRedis,
            original,
            sendCommandWrapper
          )
        }
      )
    }
  }
  // Pre-call wrapper for send_command(command, args[, callback]): names the
  // segment after the command, captures the first key, and binds whichever
  // callback form is in use (or installs one) to the segment.
  function sendCommandWrapper(segment, args, bind) {
    var position = args.length - 1
    var keys = args[1]
    var last = args[position]
    segment.name = REDIS.OPERATION + (args[0] || 'unknown')
    if (keys && typeof keys !== 'function') {
      urltils.copyParameters(agent.config,
        {key: stringifySync(keys[0], 'Unknown')}, segment.parameters)
    }
    // capture connection info for datastore instance metric
    captureInstanceAttributes(segment, this)
    if (typeof last === 'function') {
      args[position] = bind(last, true, true)
    } else if (Array.isArray(last) && typeof last[last.length - 1] === 'function') {
      last[last.length - 1] = bind(last[last.length - 1], true, true)
    } else { // let's shove a callback in there for fun
      args.push(bind(null, true, true))
    }
    return args
  }
  // Pre-call wrapper for internal_send_command(commandObject): the single
  // argument carries `command`, `args`, and `callback` properties.
  function internalSendCommandWrapper(segment, args, bind) {
    var keys = args[0].args
    var command = args[0].command
    var cb = args[0].callback
    if (cb instanceof Function) {
      args[0].callback = bind(cb, true, true)
    } else {
      // No user callback: install one that re-emits errors on the client so
      // failures are not silently swallowed.
      var self = this
      args[0].callback = tracer.bindFunction(function __NR_redisCallback(err) {
        if (err && self.emit instanceof Function) {
          self.emit('error', err)
        }
      }, segment, true)
    }
    segment.name = REDIS.OPERATION + (command || 'unknown')
    if (keys && typeof keys !== 'function') {
      urltils.copyParameters(agent.config,
        {key: stringifySync(keys[0], 'Unknown')}, segment.parameters)
    }
    // capture connection info for datastore instance metric
    captureInstanceAttributes(segment, this)
    return args
  }
}
/**
 * Captures host/port/database info for the datastore instance metric,
 * accounting for where different redis client versions keep their
 * connection options.
 *
 * @param {TraceSegment} segment - Segment to record the attributes on.
 * @param {object} client - The RedisClient instance.
 */
function captureInstanceAttributes(segment, client) {
  var opts = null
  if (client.hasOwnProperty('port') && client.hasOwnProperty('host')) {
    opts = client // redis <= 0.11 keeps connection info on the client itself
  } else if (client.hasOwnProperty('connection_options')) {
    opts = client.connection_options // redis 2.4.0 - 2.6.2
  } else if (client.hasOwnProperty('connectionOption')) {
    opts = client.connectionOption // redis 0.12 - 2.2.5
  } else if (client.hasOwnProperty('options')) {
    opts = client.options // redis 2.3.0 - 2.3.1
  }
  if (!opts) {
    logger.debug('Could not access instance attributes on connection.')
    return
  }
  // Prefer the client's tracked database index over the configured one.
  var db = (client.hasOwnProperty('selected_db') ? client.selected_db : opts.db) || 0
  segment.captureDBInstanceAttributes(
    opts.host || 'localhost',
    opts.path || opts.port || '6379',
    db
  )
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 | 1 1 1 1 1 1 | 'use strict'
var shimmer = require('../shimmer')
var logger = require('../logger.js').child({component: 'restify'})
var urltils = require('../util/urltils.js')
var NAMES = require('../metrics/names.js')
/**
 * Names the active transaction after the Restify route that matched, and
 * copies route/context parameters onto the segment.
 *
 * @param {?TraceSegment} segment - The active trace segment.
 * @param {?object} route - The matched Restify route.
 * @param {?object} context - Route parameters from the router, when given.
 */
function nameFromRoute(segment, route, context) {
  if (!segment) return logger.error("No New Relic context to set Restify route name on.")
  if (!route) return logger.error("No Restify route to use for naming.")
  var transaction = segment.transaction
  var spec = route.spec
  var path = (spec && (spec.path || spec.name)) || route.name
  var params = context || route.params
  if (params) {
    urltils.copyParameters(transaction.agent.config, params, segment.parameters)
  }
  if (!path) return logger.warn({route: route}, "No path found on Restify route.")
  // Regexp routes carry a RegExp as spec.path; use its source text for the name.
  if (path instanceof RegExp) path = path.source
  transaction.nameState.setName(NAMES.RESTIFY.PREFIX, transaction.verb,
    NAMES.ACTION_DELIMITER, path)
}
/**
 * Instruments Restify: records the dispatcher/framework environment and
 * wraps the router's `find` so matched routes name the active transaction.
 *
 * @param {Agent} agent - The New Relic APM agent.
 * @param {object} restify - The loaded restify module.
 */
module.exports = function initialize(agent, restify) {
  /* Restify doesn't directly expose its Router constructor. We need to wait
   * until a server is created, and then grab the constructor off of it.
   *
   * Unfortunately, we can't create a server ourselves as doing so causes
   * Restify's req.query extension to override express' req.query extension of
   * IncomingMessage. This is an issue if an app uses express for serving while
   * using Restify only for client-side REST requests.
   */
  shimmer.wrapMethod(
    restify,
    'restify',
    'createServer',
    function cb_wrapMethod(createServer) {
      return function wrappedCreateServer() {
        agent.environment.setDispatcher('restify')
        agent.environment.setFramework('restify')
        var server = createServer.apply(this, arguments)
        var Router = server.router.constructor
        /* Now that we have created a server, we have access to the Router
         * constructor and can instrument it.
         */
        shimmer.wrapMethod(
          Router.prototype,
          'Router.prototype',
          'find',
          function cb_wrapMethod(find) {
            return function wrappedFind(req, res, callback) {
              var tracer = agent.tracer
              if (!tracer.getTransaction()) {
                logger.trace("Restify router invoked outside transaction.")
                return find.apply(this, arguments)
              }
              // Name the transaction from the matched route before handing
              // control back to the caller's callback.
              var wrapped = function wrappedRoute(error, route, context) {
                nameFromRoute(tracer.getSegment(), route, context)
                return callback(error, route, context)
              }
              return find.call(this, req, res, wrapped)
            }
          }
        )
        return server
      }
    }
  )
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 | 1 1 1 1 | 'use strict'
var promInit = require('./promise')
// Static factory methods shared by the Promise class and the `when`
// namespace; each is wrapped as a promise "cast" by ./promise.
var STATIC_PROMISE_METHODS = [
  'reject', 'resolve', 'all', 'any', 'some', 'map', 'reduce', 'filter', 'reduceRight'
]
// Library.Spec for when.js (see the Library.Spec JSDoc in ./promise for the
// meaning of each field).
var WHEN_SPEC = {
  name: 'when',
  constructor: 'Promise',
  executor: true, // wrap the Promise constructor itself
  $proto: {
    then: ['then', 'done', 'spread', 'finally', 'ensure'],
    catch: ['catch', 'otherwise']
  },
  $static: {
    cast: STATIC_PROMISE_METHODS,
    // Internal/static members copied verbatim onto the wrapped constructor.
    $copy: STATIC_PROMISE_METHODS.concat([
      '_defer',
      '_handler',
      'race',
      '_traverse',
      '_visitRemaining',
      'settle',
      'iterate',
      'unfold',
      'never'
    ]),
    // Mutable hooks proxied via live getter/setter to the original class.
    $passThrough: [
      'enterContext',
      'exitContext',
      'createContext',
      'onFatalRejection',
      'onPotentiallyUnhandledRejectionHandled',
      'onPotentiallyUnhandledRejection'
    ]
  },
  $library: {
    cast: STATIC_PROMISE_METHODS
  }
}
/**
 * Instruments the when.js promise library.
 *
 * @param {Agent} agent - The New Relic APM agent.
 * @param {function} library - The loaded `when` module.
 */
module.exports = function initialize(agent, library) {
  promInit(agent, library, WHEN_SPEC)
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| child_process.js | 57.14% | (4 / 7) | 100% | (0 / 0) | 0% | (0 / 2) | 57.14% | (4 / 7) | |
| crypto.js | 62.5% | (5 / 8) | 100% | (0 / 0) | 0% | (0 / 3) | 62.5% | (5 / 8) | |
| dns.js | 40% | (4 / 10) | 100% | (0 / 0) | 0% | (0 / 3) | 40% | (4 / 10) | |
| domain.js | 26.67% | (4 / 15) | 0% | (0 / 5) | 0% | (0 / 3) | 26.67% | (4 / 15) | |
| fs.js | 26.67% | (8 / 30) | 0% | (0 / 4) | 0% | (0 / 6) | 26.67% | (8 / 30) | |
| globals.js | 22.22% | (8 / 36) | 0% | (0 / 29) | 0% | (0 / 8) | 22.22% | (8 / 36) | |
| http.js | 10% | (31 / 310) | 0% | (0 / 189) | 0% | (0 / 23) | 10.2% | (31 / 304) | |
| net.js | 18.75% | (9 / 48) | 0% | (0 / 31) | 0% | (0 / 9) | 20% | (9 / 45) | |
| timers.js | 14.81% | (4 / 27) | 0% | (0 / 16) | 0% | (0 / 6) | 15.38% | (4 / 26) | |
| zlib.js | 35.71% | (5 / 14) | 0% | (0 / 11) | 0% | (0 / 2) | 35.71% | (5 / 14) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 | 1 1 1 1 | 'use strict'
var wrap = require('../../shimmer').wrapMethod
module.exports = initialize
/**
 * Instruments child_process so exec/execFile completions are traced as
 * segments whose last argument (the callback) is bound to the transaction.
 *
 * @param {Agent} agent - The New Relic APM agent.
 * @param {object} childProcess - The core child_process module.
 */
function initialize(agent, childProcess) {
  function wrapMethod(fn, method) {
    return agent.tracer.wrapFunctionLast('child_process.' + method, null, fn)
  }
  wrap(childProcess, 'childProcess', ['exec', 'execFile'], wrapMethod)
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 | 1 1 1 1 1 | 'use strict'
var wrap = require('../../shimmer').wrapMethod
module.exports = initialize
/**
 * Instruments crypto's async entry points so their trailing callbacks are
 * traced as `crypto.<method>` segments.
 *
 * @param {Agent} agent - The New Relic APM agent.
 * @param {object} crypto - The core crypto module.
 */
function initialize(agent, crypto) {
  var methods = ['pbkdf2', 'randomBytes', 'pseudoRandomBytes']
  wrap(crypto, 'crypto', methods, wrapCryptoMethod)
  function wrapCryptoMethod(fn, method) {
    // Keep a named pass-through so the wrapped function is distinguishable
    // in stack traces.
    function wrappedCrypto() {
      return fn.apply(this, arguments)
    }
    return agent.tracer.wrapFunctionLast('crypto.' + method, null, wrappedCrypto)
  }
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 | 1 1 1 1 | 'use strict'
var shimmer = require('../../shimmer')
module.exports = initialize
/**
 * Instruments dns lookups/resolutions so each call gets a `dns.<method>`
 * segment and its final (callback) argument is bound to that segment.
 *
 * @param {Agent} agent - The New Relic APM agent.
 * @param {object} dns - The core dns module.
 */
function initialize(agent, dns) {
  var methods = [
    'lookup',
    'resolve',
    'resolve4',
    'resolve6',
    'resolveCname',
    'resolveMx',
    'resolveNaptr',
    'resolveNs',
    'resolvePtr',
    'resolveSrv',
    'resolveTxt',
    'reverse'
  ]
  // Binds the trailing callback to the segment for the call.
  function wrapDnsArgs(segment, args) {
    var cbIdx = args.length - 1
    args[cbIdx] = agent.tracer.bindFunction(args[cbIdx], segment, true)
    return args
  }
  shimmer.wrapMethod(dns, 'dns', methods, function wrapMethods(fn, method) {
    return agent.tracer.wrapFunction('dns.' + method, null, fn, wrapDnsArgs)
  })
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 | 1 1 1 1 | 'use strict'
var wrap = require('../../shimmer').wrapMethod
module.exports = initialize
/**
 * Instruments `domain.Domain#emit` so 'error' events emitted after the
 * tracer has lost its context run with the transaction segment stashed on
 * the domain (`__NR_transactionSegment`).
 *
 * @param {Agent} agent - The New Relic APM agent.
 * @param {object} domain - The core domain module.
 */
function initialize(agent, domain) {
  var proto = domain.Domain.prototype
  wrap(
    proto,
    'domain.Domain.prototype',
    'emit',
    wrapEmit
  )
  function wrapEmit(original) {
    return function wrappedEmit(ev) {
      // Only restore context for 'error' events when the tracer currently
      // has no segment and this domain has one stashed to restore.
      var shouldRestoreContext = ev === 'error' &&
        agent.tracer.segment === null &&
        this.__NR_transactionSegment
      if (!shouldRestoreContext) {
        return original.apply(this, arguments)
      }
      // Re-enter the stashed segment for the duration of the error handlers,
      // then clear both the tracer and the stash so the context cannot leak
      // into later events.
      agent.tracer.segment = this.__NR_transactionSegment
      try {
        return original.apply(this, arguments)
      } finally {
        agent.tracer.segment = null
        this.__NR_transactionSegment = null
      }
    }
  }
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 | 1 1 1 1 1 1 1 1 | 'use strict'
var record = require('../../metrics/recorders/generic.js')
var NAMES = require('../../metrics/names.js')
var wrap = require('../../shimmer').wrapMethod
module.exports = initialize
/**
 * Instruments the fs module: async operations get `Filesystem/<method>`
 * segments, `read`/`write` get segment-less callback binding (too hot to
 * create segments for), and the watch APIs get context-preserving wrappers.
 *
 * @param {Agent} agent - The New Relic APM agent.
 * @param {object} fs - The core fs module.
 */
function initialize(agent, fs) {
  var methods = [
    'rename',
    'truncate',
    'chown',
    'lchown',
    'fchown',
    'chmod',
    'lchmod',
    'fchmod',
    'stat',
    'lstat',
    'fstat',
    'link',
    'symlink',
    'readlink',
    'realpath',
    'unlink',
    'rmdir',
    'mkdir',
    'mkdtemp',
    'readdir',
    'close',
    'open',
    'utimes',
    'futimes',
    'fsync',
    'readFile',
    'writeFile',
    'appendFile',
    'exists',
    'ftruncate'
  ]
  var uninstrumented = [
    'write',
    'read'
  ]
  wrap(fs, 'fs', methods, segment)
  wrap(fs, 'fs', uninstrumented, agent.tracer.wrapFunctionNoSegment.bind(agent.tracer))
  wrap(fs, 'fs', ['watch'], wrapWatch)
  wrap(fs, 'fs', ['watchFile'], wrapWatchFile)
  // Produces the segment-creating wrapper for a standard async fs method.
  function segment(fn, method) {
    return agent.tracer.wrapFunctionLast(NAMES.FS.PREFIX + method, record, fn)
  }
  // fs.watch: bind the optional listener and the returned FSWatcher emitter.
  function wrapWatch(fn) {
    return function wrappedWatch() {
      var args = agent.tracer.slice(arguments)
      var cbIdx = args.length - 1
      if (typeof args[cbIdx] === 'function') {
        args[cbIdx] = agent.tracer.bindFunction(args[cbIdx])
      }
      return agent.tracer.bindEmitter(fn.apply(this, args))
    }
  }
  // fs.watchFile: bind the listener, keeping a `.listener` back-reference so
  // fs.unwatchFile can still match the user's original function.
  function wrapWatchFile(fn) {
    return function wrappedWatchFile() {
      var args = agent.tracer.slice(arguments)
      var cbIdx = args.length - 1
      var listener = args[cbIdx]
      if (typeof listener === 'function') {
        var bound = agent.tracer.bindFunction(listener)
        bound.listener = listener
        args[cbIdx] = bound
      }
      return fn.apply(this, args)
    }
  }
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 | 1 1 1 1 1 1 1 1 | 'use strict'
var events = require('events')
var wrap = require('../../shimmer').wrapMethod
var promInit = require('../promise')
module.exports = initialize
/**
 * The spec for the native `Promise` class, consumed by the shared promise
 * instrumentation (../promise).
 */
// Statics treated as promise factories ('cast') and also copied verbatim
// onto the wrapping constructor.
var STATIC_PROMISE_METHODS = ['accept', 'all', 'defer', 'race', 'reject', 'resolve']
var NATIVE_PROMISE_SPEC = {
  name: 'global', // the "library" holding the class is the global object
  constructor: 'Promise',
  executor: true, // wrap the constructor itself to instrument executors
  $proto: {
    then: ['then', 'chain'],
    catch: ['catch']
  },
  $static: {
    $copy: STATIC_PROMISE_METHODS,
    cast: STATIC_PROMISE_METHODS
  }
}
/**
 * Instruments process-level error plumbing and the global native Promise:
 * records uncaught exceptions (via `_fatalException` when available,
 * otherwise the 'uncaughtException' event), records unhandled promise
 * rejections when nothing else is listening for them, and applies the
 * native-Promise spec through the shared promise instrumentation.
 *
 * @param {Agent} agent - The New Relic APM agent.
 */
function initialize(agent) {
  // Add handler for uncaught/fatal exceptions to record them.
  // _fatalException is an undocumented feature of domains, introduced in
  // Node.js v0.8. We use _fatalException when possible because wrapping it will
  // not potentially change the behavior of the server.
  if (process._fatalException) {
    wrap(process, 'process', '_fatalException', function wrapper(original) {
      return function wrappedFatalException(error) {
        // Only record the error if we are not currently within an instrumented
        // domain.
        if (!process.domain) {
          agent.errors.add(null, error)
          agent.tracer.segment = null
        }
        return original.apply(this, arguments)
      }
    })
    wrap(
      process,
      'process',
      'emit',
      function wrapEmit(original) {
        return function wrappedEmit(ev, error, promise) {
          // Check for unhandledRejections here so we don't change the
          // behavior of the event
          if (ev === 'unhandledRejection' && error && !process.domain) {
            if (listenerCount(process, 'unhandledRejection') === 0) {
              // If there are no unhandledRejection handlers report the error
              var transaction = promise.__NR_segment && promise.__NR_segment.transaction
              agent.errors.add(transaction, error)
            }
          }
          return original.apply(this, arguments)
        }
      }
    )
  } else {
    // No _fatalException: fold uncaughtException handling into the same
    // emit wrapper instead.
    wrap(
      process,
      'process',
      'emit',
      function wrapEmit(original) {
        return function wrappedEmit(ev, error, promise) {
          if (ev === 'uncaughtException' && error && !process.domain) {
            agent.errors.add(null, error)
            agent.tracer.segment = null
          }
          // Check for unhandledRejections here so we don't change the
          // behavior of the event
          if (ev === 'unhandledRejection' && error && !process.domain) {
            // If there are no unhandledRejection handlers report the error
            if (listenerCount(process, 'unhandledRejection') === 0) {
              var transaction = promise.__NR_segment && promise.__NR_segment.transaction
              agent.errors.add(transaction, error)
            }
          }
          return original.apply(this, arguments)
        }
      }
    )
  }
  promInit(agent, global, NATIVE_PROMISE_SPEC)
}
/**
 * Count the listeners registered for `evnt` on `emitter`, preferring the
 * static EventEmitter.listenerCount where the runtime provides it and
 * falling back to counting the listeners array on older Nodes.
 */
function listenerCount(emitter, evnt) {
  var hasStaticCounter = Boolean(events.EventEmitter.listenerCount)
  return hasStaticCounter
    ? events.EventEmitter.listenerCount(emitter, evnt)
    : emitter.listeners(evnt).length
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var shimmer = require('../../shimmer.js')
var logger = require('../../logger').child({component: 'http'})
var recordWeb = require('../../metrics/recorders/http.js')
var hashes = require('../../util/hashes.js')
var cat = require('../../util/cat.js')
var instrumentOutbound = require('../../transaction/tracer/instrumentation/outbound.js')
var util = require('util')
var url = require('url')
var semver = require('semver')
var NAMES = require('../../metrics/names.js')
/*
*
* CONSTANTS
*
*/
// Marker property set on request options for the agent's own collector
// traffic, so those calls are not instrumented or counted in metrics.
var NR_CONNECTION_PROP = '__NR__connection'
// Fallbacks used when outbound request options carry no host/port.
var DEFAULT_HOST = 'localhost'
var DEFAULT_PORT = 80
// Headers stamped by upstream routers with the request arrival time,
// used to compute queue time.
var REQUEST_HEADER = 'x-request-start'
var QUEUE_HEADER = 'x-queue-start'
// Cross-application-tracing (CAT) and synthetics headers exchanged between
// New Relic-instrumented services.
var NEWRELIC_ID_HEADER = 'x-newrelic-id'
var NEWRELIC_APP_DATA_HEADER = 'x-newrelic-app-data'
var NEWRELIC_TRANSACTION_HEADER = 'x-newrelic-transaction'
var NEWRELIC_SYNTHETICS_HEADER = 'x-newrelic-synthetics'
// Case-insensitive match for the Content-Length header name, since outbound
// headers can be capitalized in any way.
var CONTENT_LENGTH_REGEX = /^Content-Length$/i
// For incoming requests this instrumentation functions by wrapping
// `http.createServer` and `http.Server#addListener`. The former merely sets the
// agent dispatcher to 'http' and the latter wraps any event handlers bound to
// `request`.
//
// The `request` event listener wrapper creates a transaction proxy which will
// start a new transaction whenever a new request comes in. It also scans the
// headers of the incoming request looking for CAT and synthetics headers.
/**
 * Wrap an http.Server 'request' listener in a transaction proxy that starts
 * a transaction per incoming request, collects request/response attributes,
 * computes queue time, and handles inbound CAT/synthetics headers.
 *
 * @param {Agent} agent - The agent owning the tracer and config.
 * @param {Function} listener - The user's request listener; returned
 *                              unchanged when falsy.
 * @return {Function} The wrapped listener (or the original when falsy).
 */
function wrapListener(agent, listener) {
  if (!listener) {
    logger.debug('No request listener defined, not wrapping.')
    return listener
  }
  var tracer = agent.tracer
  var serverPort = null
  return tracer.transactionProxy(function wrappedHandler(request, response) {
    var transaction = tracer.getTransaction()
    if (!transaction) return listener.apply(this, arguments)
    transaction.nameState.setPrefix(NAMES.NODEJS.PREFIX)
    // Request headers captured as agent attributes (looked up lowercased).
    var collectedRequestHeaders = [
      'accept',
      'contentLength',
      'contentType',
      'referer',
      'host'
    ]
    if (request) {
      for (var i = 0; i < collectedRequestHeaders.length; i++) {
        var headerKey = collectedRequestHeaders[i]
        var header = request.headers[headerKey.toLowerCase()]
        if (header !== undefined) {
          // If any more processing of the headers is required consider refactoring this.
          if (headerKey === 'referer') {
            // Strip the query string from the referer before recording it.
            var queryParamIndex = header.indexOf('?')
            if (queryParamIndex !== -1) {
              header = header.substring(0, queryParamIndex)
            }
          }
          var attributeName = 'request.headers.' + headerKey
          transaction.addAgentAttribute(attributeName, header)
        }
      }
      if (request.method !== undefined) {
        transaction.addAgentAttribute('request.method', request.method)
      }
      if (request.headers['user-agent'] !== undefined) {
        transaction.addAgentAttribute('request.headers.userAgent',
          request.headers['user-agent'])
      }
    }
    // Create the transaction segment using the request URL for now. Once a
    // better name can be determined this segment will be renamed to that.
    var segment = tracer.createSegment(request.url, recordWeb)
    segment.start()
    if (agent.config.feature_flag.custom_instrumentation) {
      transaction.webSegment = segment
    }
    /* Needed for Connect and Express middleware that monkeypatch request
     * and response via listeners.
     */
    tracer.bindEmitter(request, segment)
    tracer.bindEmitter(response, segment)
    // the error tracer needs a URL for tracing, even though naming overwrites
    transaction.parsedUrl = url.parse(request.url, true)
    transaction.url = transaction.parsedUrl.pathname
    transaction.verb = request.method
    // URL is sent as an agent attribute with transaction events
    if (agent.config.feature_flag.send_request_uri_attribute) {
      transaction.addAgentAttribute('request_uri', transaction.url)
    }
    // store the port on which this transaction runs
    if (this.address instanceof Function) {
      var address = this.address()
      if (address) {
        serverPort = address.port
      }
    }
    transaction.port = serverPort
    // need to set any config-driven names early for RUM
    logger.trace({url: request.url, transaction: transaction.id},
      'Applying user naming rules for RUM.')
    transaction.applyUserNamingRules(request.url)
    /**
     * Calculate Queue Time
     *
     * Queue time is provided by certain providers by stamping the request
     * header with the time the request arrived at the router.
     *
     * Units for queue time are
     */
    var qtime = request.headers[REQUEST_HEADER] || request.headers[QUEUE_HEADER]
    if (qtime) {
      // Headers may arrive as "t=<timestamp>"; keep only the value part.
      var split = qtime.split('=')
      if (split.length > 1) {
        qtime = split[1]
      }
      var start = parseFloat(qtime)
      if (isNaN(start)) {
        logger.warn('Queue time header parsed as NaN (' + qtime + ')')
      } else {
        // Normalize the timestamp to milliseconds by magnitude:
        // nano seconds
        if (start > 1e18) start = start / 1e6
        // micro seconds
        else if (start > 1e15) start = start / 1e3
        // seconds
        else if (start < 1e12) start = start * 1e3
        transaction.queueTime = Date.now() - start
      }
    }
    if (agent.config.feature_flag.cat) {
      var encKey = agent.config.encoding_key
      var incomingCatId = request.headers[NEWRELIC_ID_HEADER]
      var obfTransaction = request.headers[NEWRELIC_TRANSACTION_HEADER]
      var synthHeader = request.headers[NEWRELIC_SYNTHETICS_HEADER]
      if (encKey) {
        cat.handleCatHeaders(incomingCatId, obfTransaction, encKey, transaction)
        if (transaction.incomingCatId) {
          logger.trace('Got inbound request CAT headers in transaction %s',
            transaction.id)
        }
        if (synthHeader && agent.config.trusted_account_ids) {
          handleSyntheticsHeader(
            synthHeader,
            encKey,
            agent.config.trusted_account_ids,
            transaction
          )
        }
      }
    }
    // Finalizes the transaction when the response finishes or the request
    // aborts, whichever happens first.
    function instrumentedFinish() {
      // Remove listeners so this doesn't get called twice.
      response.removeListener('finish', instrumentedFinish)
      request.removeListener('aborted', instrumentedFinish)
      // Naming must happen before the segment and transaction are ended,
      // because metrics recording depends on naming's side effects.
      transaction.setName(transaction.parsedUrl, response.statusCode)
      if (response) {
        if (response.statusCode !== undefined) {
          var statusCode = response.statusCode
          if (typeof statusCode.toString === 'function') {
            var responseCode = statusCode.toString()
            if (typeof responseCode === 'string') {
              transaction.addAgentAttribute('httpResponseCode', responseCode)
            }
          }
          var responseStatus = parseInt(statusCode, 10)
          if (!isNaN(responseStatus)) {
            transaction.addAgentAttribute('response.status', responseStatus)
          }
        }
        if (response.statusMessage !== undefined) {
          transaction.addAgentAttribute('httpResponseMessage', response.statusMessage)
        }
        var contentLength = response.getHeader('content-length')
        if (contentLength) {
          transaction.addAgentAttribute(
            'response.headers.contentLength',
            parseInt(contentLength, 10)
          )
        }
        var contentType = response.getHeader('content-type')
        if (contentType) {
          transaction.addAgentAttribute(
            'response.headers.contentType',
            contentType
          )
        }
      }
      // This should be the last thing called before the web segment finishes.
      segment.markAsWeb(transaction.parsedUrl)
      segment.end()
      transaction.end()
    }
    response.once('finish', instrumentedFinish)
    request.once('aborted', instrumentedFinish)
    return tracer.bindFunction(listener, segment).apply(this, arguments)
  })
}
// FLAG: cat this wont be used unless cat is enabled, see below where we
// actually do the shimmer stuff if you'd like to verify.
// FLAG: cat this wont be used unless cat is enabled, see below where we
// actually do the shimmer stuff if you'd like to verify.
/**
 * Wrap ServerResponse#writeHead to add outbound CAT/synthetics response
 * headers when the inbound request carried trusted CAT identifiers.
 * Every early-return path falls through to the original writeHead.
 */
function wrapWriteHead(agent, writeHead) {
  return function wrappedWriteHead() {
    var transaction = agent.tracer.getTransaction()
    if (!transaction) {
      logger.trace('No transaction - not adding response CAT headers')
      return writeHead.apply(this, arguments)
    }
    // FLAG: synthetics
    if (agent.config.feature_flag.synthetics && transaction.syntheticsHeader) {
      this.setHeader(NEWRELIC_SYNTHETICS_HEADER, transaction.syntheticsHeader)
    }
    if (!transaction.incomingCatId) {
      logger.trace('No incoming CAT ID - not adding response CAT headers')
      return writeHead.apply(this, arguments)
    }
    if (!agent.config.trusted_account_ids) {
      logger.trace('No account IDs defined in config.trusted_account_ids - ' +
        'not adding response CAT headers')
      return writeHead.apply(this, arguments)
    }
    // The CAT id has the form "<accountId>#<suffix>"; only the account part
    // is checked against the trusted list.
    var accountId = transaction.incomingCatId.split('#')[0]
    accountId = parseInt(accountId, 10)
    if (agent.config.trusted_account_ids.indexOf(accountId) === -1) {
      logger.trace('Request from untrusted CAT header account id: %s - ' +
        'not adding response CAT headers', accountId)
      return writeHead.apply(this, arguments)
    }
    // Not sure this could ever happen, but should guard against it anyway
    // otherwise exception we blow up the user's app.
    if (!agent.config.cross_process_id || !agent.config.encoding_key) {
      logger.trace(
        'Managed to have agent.config.trusted_account_ids but not cross_process_id ' +
        '(%s) or encoding_key (%s) - not adding response CAT headers',
        agent.config.cross_process_id,
        agent.config.encoding_key
      )
      return writeHead.apply(this, arguments)
    }
    // -1 means no content length header was sent. We should only send this
    // value in the appData if the header is set.
    var contentLength = -1
    // writeHead may receive headers as its last argument; scan them first.
    var new_headers = arguments[arguments.length - 1]
    if (typeof new_headers === 'object') {
      for (var header in new_headers) { // jshint ignore: line
        if (CONTENT_LENGTH_REGEX.test(header)) {
          contentLength = new_headers[header]
          break
        }
      }
    }
    if (contentLength === -1 && this._headers) {
      // JSHint complains about ownProperty stuff, but since we are looking
      // for a specific name that doesn't matter so I'm disabling it.
      // Outbound headers can be capitalized in any way, use regex instead
      // of direct lookup.
      for (var userHeader in this._headers) { // jshint ignore: line
        if (CONTENT_LENGTH_REGEX.test(userHeader)) {
          contentLength = this._headers[userHeader]
          break
        }
      }
    }
    // Stored on the tx so we can push a metric with this time instead of
    // actual duration.
    transaction.catResponseTime = transaction.timer.getDurationInMillis()
    var appData
    var txName = transaction.name || transaction.nameState.getName() || ''
    try {
      if (txName) {
        txName = NAMES.WEB.RESPONSE_TIME + '/' + txName
      }
      appData = JSON.stringify([
        agent.config.cross_process_id, // cross_process_id
        txName, // transaction name
        transaction.queueTime / 1000, // queue time (s)
        transaction.catResponseTime / 1000, // response time (s)
        contentLength, // content length (if content-length header is also being sent)
        transaction.id, // TransactionGuid
        false // force a transaction trace to be recorded
      ])
    } catch (err) {
      logger.trace(err, 'Failed to serialize transaction: %s - ' +
        'not adding CAT response headers',
        txName)
      return writeHead.apply(this, arguments)
    }
    var encKey = agent.config.encoding_key
    var obfAppData = hashes.obfuscateNameUsingKey(appData, encKey)
    this.setHeader(NEWRELIC_APP_DATA_HEADER, obfAppData)
    logger.trace('Added outbound response CAT headers in transaction %s', transaction.id)
    return writeHead.apply(this, arguments)
  }
}
/**
 * Wrap http.request (or Agent#request) to inject outbound CAT/synthetics
 * headers and instrument the outbound call, while skipping the agent's own
 * collector traffic (marked with NR_CONNECTION_PROP).
 */
function wrapRequest(agent, request) {
  // TODO: early return in the !transaction || internalOnly case
  return function wrappedRequest(options) {
    var tracer = agent.tracer
    var transaction = tracer.getTransaction()
    var outboundHeaders = {}
    var args = tracer.slice(arguments)
    var context = this
    var needsHeaders = false
    // don't pollute metrics and calls with NR connections
    var internalOnly = options && options[NR_CONNECTION_PROP]
    if (internalOnly) options[NR_CONNECTION_PROP] = undefined
    if (transaction && !internalOnly && agent.config.encoding_key) {
      // FLAG: synthetics
      if (agent.config.feature_flag.synthetics && transaction.syntheticsHeader) {
        outboundHeaders[NEWRELIC_SYNTHETICS_HEADER] = transaction.syntheticsHeader
      }
      // FLAG: cat
      if (agent.config.feature_flag.cat) {
        if (agent.config.obfuscatedId) {
          outboundHeaders[NEWRELIC_ID_HEADER] = agent.config.obfuscatedId
        }
        var pathHash = hashes.calculatePathHash(
          agent.config.applications()[0],
          transaction.name || transaction.nameState.getName() || '',
          transaction.referringPathHash
        )
        transaction.pushPathHash(pathHash)
        // CAT transaction payload: [guid, record?, tripId, pathHash].
        var txData = [
          transaction.id,
          false,
          transaction.tripId || transaction.id,
          pathHash
        ]
        try {
          txData = JSON.stringify(txData)
          var txHeader = hashes.obfuscateNameUsingKey(txData, agent.config.encoding_key)
          outboundHeaders[NEWRELIC_TRANSACTION_HEADER] = txHeader
          logger.trace('Added outbound request CAT headers in transaction %s',
            transaction.id)
        } catch (err) {
          logger.trace(err, 'Failed to serialize outbound response header')
        }
      }
    }
    var headers = Object.keys(outboundHeaders)
    var i, l
    if (transaction && !internalOnly) {
      // Copy `options` before touching it so the caller's object is never
      // mutated; how the headers are injected depends on its shape.
      if (util.isArray(options.headers)) {
        options = util._extend({}, options)
        options.headers = options.headers.slice()
        args[0] = options
        for (i = 0, l = headers.length; i < l; ++i) {
          options.headers.push([headers[i], outboundHeaders[headers[i]]])
        }
      } else if (typeof options === 'object' &&
          options.headers && options.headers.expect) {
        options = util._extend({}, options)
        options.headers = util._extend({}, options.headers)
        options.headers = util._extend(options.headers, outboundHeaders)
        args[0] = options
      } else {
        // Defer header injection to setHeader on the request object.
        needsHeaders = true
      }
      var request_url = options
      // If the request options are a string, parse it as a URL object.
      if (typeof options === 'string') {
        request_url = url.parse(options)
      }
      // hostname & port logic pulled directly from node's 0.10 lib/http.js
      var hostname = request_url.hostname || request_url.host || DEFAULT_HOST
      var port = request_url.port || request_url.defaultPort || DEFAULT_PORT
      return instrumentOutbound(agent, hostname, port, makeRequest)
    }
    // No transaction (or internal traffic): issue the request uninstrumented.
    return makeRequest()
    function makeRequest() {
      var requested = request.apply(context, args)
      if (!needsHeaders) return requested
      // setHeader throws if the headers were already written; log and move on.
      try {
        for (i = 0, l = headers.length; i < l; ++i) {
          requested.setHeader(headers[i], outboundHeaders[headers[i]])
        }
      } catch (err) {
        if (options && options.headers && typeof options.headers === 'object') {
          logger.warn(
            'Could not set cat header, header written with: ',
            Object.keys(options.headers)
          )
        } else {
          logger.warn('Could not set cat header, header already written')
        }
      }
      return requested
    }
  }
}
// Wrap the deprecated http.Client#request so calls made inside a transaction
// are instrumented as outbound requests; otherwise pass straight through.
function wrapLegacyRequest(agent, request) {
  return function wrappedLegacyRequest(method, path, headers) {
    var doRequest = request.bind(this, method, path, headers)
    if (!agent.tracer.getTransaction()) return doRequest()
    return instrumentOutbound(agent, this.host, this.port, doRequest)
  }
}
// Instrument the request method of a deprecated http.Client prototype.
function wrapLegacyClient(agent, proto) {
  var makeWrapper = wrapLegacyRequest.bind(null, agent)
  shimmer.wrapMethod(proto, 'http.Client.prototype', 'request', makeWrapper)
}
/**
 * Instrument the http/https module: mark the dispatcher on createServer,
 * wrap 'request' listeners, optionally add CAT response headers, wrap
 * outbound request entry points, and lazily instrument the deprecated
 * http.Client / http.createClient APIs via property getters.
 *
 * @param {Agent} agent - Agent instance.
 * @param {Object} http - The http (or https) module being instrumented.
 * @param {string} moduleName - 'http' or 'https'.
 */
module.exports = function initialize(agent, http, moduleName) {
  // FIXME: will this ever not be called?
  shimmer.wrapMethod(http, 'http', 'createServer', function cb_wrapMethod(createServer) {
    return function setDispatcher(requestListener) { // eslint-disable-line no-unused-vars
      agent.environment.setDispatcher('http')
      return createServer.apply(this, arguments)
    }
  })
  /**
   * It's not a great idea to monkeypatch EventEmitter methods given how hot
   * they are, but this method is simple and works with all versions of
   * node supported by the module.
   */
  shimmer.wrapMethod(
    http && http.Server && http.Server.prototype,
    'http.Server.prototype',
    ['on', 'addListener'],
    function cb_wrapMethod(addListener) {
      return function cls_wrapMethod(type, listener) {
        // Only 'request' listeners get the transaction-proxy treatment.
        if (type === 'request' && listener instanceof Function) {
          return addListener.call(this, type, wrapListener(agent, listener))
        }
        return addListener.apply(this, arguments)
      }
    }
  )
  // FLAG: cat
  if (agent.config.feature_flag.cat) {
    shimmer.wrapMethod(http && http.ServerResponse && http.ServerResponse.prototype,
      'http.ServerResponse.prototype',
      'writeHead',
      wrapWriteHead.bind(null, agent))
  }
  /**
   * As of node 0.8, http.request() is the right way to originate outbound
   * requests.
   */
  if (http && http.Agent && http.Agent.prototype && http.Agent.prototype.request) {
    // Node 0.11+ always uses an Agent.
    shimmer.wrapMethod(
      http.Agent.prototype,
      'http.Agent.prototype',
      'request',
      wrapRequest.bind(null, agent)
    )
  } else if (moduleName !== 'https' || semver.satisfies(process.version, '<=0.10.x')) {
    shimmer.wrapMethod(
      http,
      'http',
      'request',
      wrapRequest.bind(null, agent)
    )
  }
  // http.Client is deprecated, but still in use
  var DeprecatedClient, deprecatedCreateClient
  // Restores the real properties once a getter has fired, so the deprecated
  // API is only instrumented (and its prototype wrapped) on first access.
  function clearGetters() {
    if (DeprecatedClient) {
      delete http.Client
      http.Client = DeprecatedClient
    }
    if (deprecatedCreateClient) {
      delete http.createClient
      http.createClient = deprecatedCreateClient
    }
  }
  DeprecatedClient = shimmer.wrapDeprecated(
    http,
    'http',
    'Client',
    {
      get: function get() {
        // Build a throwaway client to reach the real prototype to wrap.
        var example = new DeprecatedClient(80, 'localhost')
        wrapLegacyClient(agent, example.constructor.prototype)
        clearGetters()
        return DeprecatedClient
      },
      set: function set(NewClient) {
        DeprecatedClient = NewClient
      }
    }
  )
  deprecatedCreateClient = shimmer.wrapDeprecated(
    http,
    'http',
    'createClient',
    {
      get: function get() {
        var example = deprecatedCreateClient(80, 'localhost')
        wrapLegacyClient(agent, example.constructor.prototype)
        clearGetters()
        return deprecatedCreateClient
      },
      set: function set(newCreateClient) {
        deprecatedCreateClient = newCreateClient
      }
    }
  )
}
/**
* Take the X-NewRelic-Synthetics header and apply any appropriate data to the
* transaction for later use. This is the gate keeper for attributes being
* added onto the transaction object for synthetics.
*
* @param {string} header - The raw X-NewRelic-Synthetics header
* @param {string} encKey - Encoding key handed down from the server
* @param {̄Array} trustedIds - Array of accounts to trust the header from.
* @param {Transaction} transaction - Where the synthetics data is attached to.
*/
function handleSyntheticsHeader(header, encKey, trustedIds, transaction) {
  // Only attach synthetics info when the header parses and verifies cleanly.
  var verifiedData = parseSyntheticsHeader(header, encKey, trustedIds)
  if (verifiedData) {
    transaction.syntheticsData = verifiedData
    transaction.syntheticsHeader = header
  }
}
/**
* Parse out and verify the the pieces of the X-NewRelic-Synthetics header.
*
* @param {string} header - The raw X-NewRelic-Synthetics header
* @param {string} encKey - Encoding key handed down from the server
* @param {̄Array} trustedIds - Array of accounts to trust the header from.
* @return {Object or null} - On successful parse and verification an object of
* synthetics data is returned, otherwise null is
* returned.
*/
function parseSyntheticsHeader(header, encKey, trustedIds) {
  // Eagerly declare this object because we know what it should look like and
  // can use that for header verification.
  var parsedData = {
    version: null,
    accountId: null,
    resourceId: null,
    jobId: null,
    monitorId: null
  }
  var synthData = null
  try {
    synthData = JSON.parse(
      hashes.deobfuscateNameUsingKey(header, encKey)
    )
  } catch (e) {
    logger.trace(e, 'Got unparsable synthetics header: %s', header)
    return
  }
  if (!util.isArray(synthData)) {
    logger.trace(
      'Synthetics data is not an array: %s (%s)',
      synthData,
      typeof synthData
    )
    return
  }
  if (synthData.length < Object.keys(parsedData).length) {
    logger.trace(
      'Synthetics header length is %s, expected at least %s',
      synthData.length,
      Object.keys(parsedData).length
    )
    // Bail out on a too-short header. Previously this branch only logged and
    // fell through, continuing to parse a header already diagnosed as
    // invalid; the missing fields would come back as undefined.
    return
  }
  parsedData.version = synthData[0]
  if (parsedData.version !== 1) {
    logger.trace(
      'Synthetics header version is not 1, got: %s (%s)',
      parsedData.version,
      synthData
    )
    return
  }
  parsedData.accountId = synthData[1]
  if (parsedData.accountId) {
    if (trustedIds.indexOf(parsedData.accountId) === -1) {
      logger.trace(
        'Synthetics header account ID is not in trusted account IDs: %s (%s)',
        parsedData.accountId,
        trustedIds
      )
      return
    }
  } else {
    logger.trace('Synthetics header account ID missing.')
    return
  }
  parsedData.resourceId = synthData[2]
  if (!parsedData.resourceId) {
    logger.trace('Synthetics resource ID is missing.')
    return
  }
  // Missing job/monitor IDs are logged but not fatal; the header is still
  // usable without them.
  parsedData.jobId = synthData[3]
  if (!parsedData.jobId) {
    logger.trace('Synthetics job ID is missing.')
  }
  parsedData.monitorId = synthData[4]
  if (!parsedData.monitorId) {
    logger.trace('Synthetics monitor ID is missing.')
  }
  return parsedData
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 | 1 1 1 1 1 1 1 1 1 | 'use strict'
var wrap = require('../../shimmer').wrapMethod
module.exports = initialize
/**
 * Instrument the net module: trace server connection handling via _listen2
 * and outbound socket connections via Socket#connect.
 *
 * @param {Agent} agent - Agent whose tracer creates and binds segments.
 * @param {Object} net - The net module being instrumented.
 */
function initialize(agent, net) {
  wrap(net.Server.prototype, 'net.Server.prototype', '_listen2', wrapListen2)
  wrap(net.Socket.prototype, 'net.Socket.prototype', 'connect', wrapConnect)
  // Wraps the server's emit so each inbound 'connection' gets its own
  // child segment and the socket's low-level onread is bound to it.
  function wrapListen2(original) {
    return function wrappedListen2() {
      var segment = agent.tracer.getSegment()
      var emit = this.emit
      if (!segment || !emit) return original.apply(this, arguments)
      this.emit = wrappedEmit
      return original.apply(this, arguments)
      function wrappedEmit(ev, socket) {
        // Only intercept real connection events carrying a live handle.
        if (ev !== 'connection' || !socket || !socket._handle) {
          return emit.apply(this, arguments)
        }
        var child = agent.tracer.createSegment('net.Server.onconnection', null, segment)
        if (socket._handle.onread) {
          socket._handle.onread = agent.tracer.bindFunction(socket._handle.onread, child)
        }
        return agent.tracer.bindFunction(emit, child, true).apply(this, arguments)
      }
    }
  }
  // Wraps Socket#connect: inside a transaction, the connect runs in its own
  // segment with the callback, handle reads, and socket events bound to it.
  function wrapConnect(original) {
    return function connectWrapper() {
      if (!agent.getTransaction()) return original.apply(this, arguments)
      var socket = this
      // Normalize to [options] or [options, callback] like node core does.
      var args = normalizeConnectArgs(arguments)
      return agent.tracer.addSegment(
        'net.Socket.connect',
        null,
        null,
        true,
        wrappedConnect
      )
      function wrappedConnect(child) {
        if (args[1]) args[1] = agent.tracer.bindFunction(args[1], child)
        var result = original.apply(socket, args)
        if (socket._handle) {
          socket._handle.onread = agent.tracer.bindFunction(socket._handle.onread, child)
        }
        agent.tracer.bindEmitter(socket, child)
        return result
      }
    }
  }
}
// taken from node master on 2013/10/30
// taken from node master on 2013/10/30
// Normalizes Socket#connect arguments to [options] or [options, callback].
function normalizeConnectArgs(args) {
  var first = args[0]
  var opts
  var toPort = function (x) {
    var n = Number(x)
    return n >= 0 ? n : false
  }
  if (typeof first === 'object' && first !== null) {
    // connect(options, [cb]) -- use the caller's options object as-is
    opts = first
  } else if (typeof first === 'string' && toPort(first) === false) {
    // connect(path, [cb]) -- a non-numeric string is a pipe/socket path
    opts = {path: first}
  } else {
    // connect(port, [host], [cb])
    opts = {port: first}
    if (typeof args[1] === 'string') {
      opts.host = args[1]
    }
  }
  var maybeCb = args[args.length - 1]
  if (typeof maybeCb === 'function') {
    return [opts, maybeCb]
  }
  return [opts]
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 | 1 1 1 1 | 'use strict'
var wrap = require('../../shimmer').wrapMethod
var semver = require('semver')
module.exports = initialize
/**
 * Instrument timer functions on both the timers module and (when distinct)
 * the global object, plus process tick methods, so callbacks stay bound to
 * the trace context that scheduled them.
 *
 * @param {Agent} agent - Agent whose tracer wraps the callbacks.
 * @param {Object} timers - The timers module (or `global` on recursion).
 */
function initialize(agent, timers) {
  // As of iojs 1.6.3 all timers are on the global object, and do
  // not need a require('timers') call to access them
  if (timers !== global && semver.satisfies(process.version, '>=1.6.3') &&
      global.setTimeout && !global.setTimeout.__NR_original) {
    // Recurse once to instrument the global copies as well.
    initialize(agent, global)
  }
  var processMethods = ['nextTick', '_nextDomainTick', '_tickDomainCallback']
  wrap(process, 'process', processMethods, function bindProcess(original, method) {
    return agent.tracer.wrapFunctionFirstNoSegment(original, method)
  })
  // These get their own trace segments per scheduled callback.
  var asynchronizers = [
    'setTimeout',
    'setInterval'
  ]
  wrap(timers, 'timers', asynchronizers, function wrapTimers(original, method) {
    return agent.tracer.wrapFunctionFirst('timers.' + method, null, original)
  })
  // We don't want to create segments for setImmediate calls, as the
  // object allocation may incur too much overhead in some situations
  var uninstrumented = [
    'setImmediate'
  ]
  wrap(timers, 'timers', uninstrumented, function wrapUninstrumented(original, method) {
    return agent.tracer.wrapFunctionFirstNoSegment(original, method)
  })
  var clearTimeouts = ['clearTimeout']
  wrap(timers, 'timers', clearTimeouts, function wrapClear(original) {
    return function wrappedClear(timer) {
      var segment
      // Unwrap the pending callback so node's internals see the original,
      // and mark the associated segment ignored since it will never fire.
      if (timer && timer._onTimeout) {
        segment = agent.tracer.getSegmentFromWrapped(timer._onTimeout)
        timer._onTimeout = agent.tracer.getOriginal(timer._onTimeout)
      }
      if (timer && timer._onImmediate) {
        timer._onImmediate = agent.tracer.getOriginal(timer._onImmediate)
      }
      if (segment) segment.ignore = true
      return original.apply(this, arguments)
    }
  })
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 | 1 1 1 1 1 | 'use strict'
var wrap = require('../../shimmer').wrapMethod
module.exports = initialize
// zlib convenience functions whose last-argument callback gets a trace
// segment wrapped around it (see `segment` in initialize below).
var methods = [
  'deflate',
  'deflateRaw',
  'gzip',
  'gunzip',
  'inflate',
  'inflateRaw',
  'unzip'
]
/**
 * Instrument zlib: the shared stream prototype's methods are wrapped without
 * segments, while the convenience functions each get a named trace segment.
 */
function initialize(agent, zlib) {
  // Bind once so the same no-segment wrapper serves every stream method.
  var noSegment = agent.tracer.wrapFunctionNoSegment.bind(agent.tracer)
  if (zlib.Deflate && zlib.Deflate.prototype) {
    var streamProto = Object.getPrototypeOf(zlib.Deflate.prototype)
    if (streamProto._transform) {
      // streams2
      wrap(streamProto, 'zlib', '_transform', noSegment)
    } else if (streamProto.write && streamProto.flush && streamProto.end) {
      // plain ol' streams
      wrap(streamProto, 'zlib', ['write', 'flush', 'end'], noSegment)
    }
  }
  function segment(fn, method) {
    return agent.tracer.wrapFunctionLast('zlib.' + method, null, fn)
  }
  wrap(zlib, 'zlib', methods, segment)
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| common.js | 42.86% | (3 / 7) | 0% | (0 / 4) | 0% | (0 / 1) | 42.86% | (3 / 7) | |
| express-2.js | 23.33% | (7 / 30) | 0% | (0 / 14) | 0% | (0 / 3) | 26.92% | (7 / 26) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 | 1 1 1 | 'use strict'
var NAMES = require('../../metrics/names.js')
module.exports.ensurePartialName = ensurePartialName
// Ensures that partialName begins with the express prefix
// http instrumentation will set partialName before passing the request off to express
// Ensures that partialName begins with the express prefix
// http instrumentation will set partialName before passing the request off to express
function ensurePartialName(trans) {
  var alreadyExpressNamed = trans.nameState.getName() != null &&
    trans.nameState.prefix === NAMES.EXPRESS.PREFIX
  if (alreadyExpressNamed) return
  trans.nameState.setPrefix(NAMES.EXPRESS.PREFIX)
  trans.nameState.setVerb(trans.verb)
  trans.nameState.setDelimiter(NAMES.ACTION_DELIMITER)
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 | 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../../logger.js').child({component: 'express'})
var urltils = require('../../util/urltils.js')
var ensurePartialName = require('./common.js').ensurePartialName
var NAMES = require('../../metrics/names.js')
module.exports.wrapMatchRequest = wrapMatchRequest
// Wrap Express 2's route matcher: inside a transaction, name the current
// segment from whichever route matched; outside one, just delegate.
function wrapMatchRequest(tracer, version, original) {
  return function cls_wrapMatchRequest() {
    if (tracer.getTransaction()) {
      var matchedRoute = original.apply(this, arguments)
      nameFromRoute(tracer.getSegment(), matchedRoute)
      return matchedRoute
    }
    logger.trace(
      'Express %d router called outside transaction (wrapMatchRequest).',
      version
    )
    return original.apply(this, arguments)
  }
}
// Derive the transaction name (and copy route parameters) from a matched
// Express route, either appending to or replacing the current name state.
function nameFromRoute(segment, route, params, append) {
  if (!segment) return logger.error("No New Relic context to set Express route name on.")
  if (!route) return logger.debug("No Express route to use for naming.")
  // NOTE(review): the `params` argument is unconditionally overwritten here,
  // so any value a caller passes is ignored -- confirm whether callers are
  // expected to rely on their own params (possibly `params || route.params`
  // was intended).
  params = route.params
  var trans = segment.transaction
  var path = route.path || route.regexp
  if (!path) return logger.debug({route: route}, "No path found on Express route.")
  // when route is a regexp, route.path will be a regexp
  if (path instanceof RegExp) path = path.source
  urltils.copyParameters(trans.agent.config, params, segment.parameters)
  if (append) {
    // Append this route's path onto the existing Express partial name.
    ensurePartialName(trans)
    trans.nameState.appendPath(path)
  } else {
    // Replace the name outright with the Express prefix + verb + path.
    trans.nameState.setName(
      NAMES.EXPRESS.PREFIX,
      trans.verb,
      NAMES.ACTION_DELIMITER,
      path
    )
  }
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| index.js | 20.2% | (20 / 99) | 0% | (0 / 49) | 0% | (0 / 19) | 23.26% | (20 / 86) | |
| mapper.js | 14.29% | (5 / 35) | 0% | (0 / 18) | 0% | (0 / 3) | 15.15% | (5 / 33) | |
| names.js | 100% | (32 / 32) | 100% | (0 / 0) | 100% | (0 / 0) | 100% | (32 / 32) | |
| normalizer.js | 17.2% | (16 / 93) | 0% | (0 / 48) | 0% | (0 / 12) | 17.98% | (16 / 89) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var Stats = require('../stats')
var ApdexStats = require('../stats/apdex.js')
var NAMES = require('./names')
/*
*
* CONSTANTS
*
*/
var FROM_MILLIS = 1e-3
/**
 * A metric is a set of aggregated data (summary statistics) associated with a
 * metric name, optionally bound to a scope (typically a transaction or
 * background-task name). This collection maps names (or scope + name) to
 * aggregates, merges with other collections, and serializes itself into the
 * nested-array form the collector expects. Each transaction carries its own
 * collection, merged into the agent's at finalization so a transaction's
 * metrics are never split across harvest cycles; a failed harvest can be
 * merged back into the next cycle's collection. Names may be remapped to
 * short numeric IDs at serialization time to shrink the JSON.
 *
 * @param {Number} apdexT The apdex-tolerating value, for use in creating
 *                        apdex statistics. Zero is allowed.
 * @param {MetricMapper} mapper The mapper that turns metric names into IDs.
 * @param {MetricNormalizer} normalizer Applied to names at serialization.
 */
function Metrics(apdexT, mapper, normalizer) {
  var noApdex = apdexT === undefined || apdexT === null || apdexT === ''
  if (noApdex) throw new Error("metrics must be created with apdexT")
  if (!mapper) throw new Error("metrics must be created with a mapper")
  if (!normalizer) {
    throw new Error("metrics must be created with a name normalizer")
  }
  this.started = Date.now()
  this.apdexT = apdexT
  this.mapper = mapper
  this.normalizer = normalizer
  this.unscoped = {} // {name : stats}
  this.scoped = {} // {scope : {name : stats}}
}
/**
 * Preferred entry point for recording timings. Records the duration (and
 * optionally the portion of it exclusive to this metric, i.e. not spent in
 * child operations) under the given name and optional scope, creating the
 * underlying aggregate on first use.
 *
 * @param {string} name The name of the metric.
 * @param {string} scope (Optional) The scope to which the metric belongs.
 * @param {Number} duration The duration of the related operation, in milliseconds.
 * @param {Number} exclusive (Optional) The portion of the operation specific to this
 *                           metric.
 * @return {Stats} The aggregated data related to this metric.
 */
Metrics.prototype.measureMilliseconds = measureMilliseconds
function measureMilliseconds(name, scope, duration, exclusive) {
  var metric = this.getOrCreateMetric(name, scope)
  metric.recordValueInMillis(duration, exclusive)
  return metric
}
/**
 * Record the size of an operation under the given (unscoped) metric name,
 * creating the aggregate on first use.
 *
 * @param {string} name The name of the metric.
 * @param {Number} size The size of the related operation, in bytes.
 * @return {Stats} The aggregated data related to this metric.
 */
Metrics.prototype.measureBytes = function measureBytes(name, size) {
  var metric = this.getOrCreateMetric(name)
  metric.recordValueInBytes(size)
  return metric
}
/**
 * Fetch the Stats aggregate for a name (and optional scope), creating it if
 * it doesn't already exist.
 *
 * @param {string} name The name of the requested metric; required.
 * @param {string} scope (Optional) The scope to which the metric is bound.
 * @return {Stats} The aggregated data for that name.
 */
Metrics.prototype.getOrCreateMetric = function getOrCreateMetric(name, scope) {
  if (!name) throw new Error('Metrics must be named')
  var container = this._resolve(scope)
  var stats = container[name]
  if (!stats) stats = container[name] = new Stats()
  return stats
}
/**
 * Fetch the ApdexStats aggregate for a name (and optional scope), creating it
 * if it doesn't already exist.
 *
 * @param {string} name The name of the requested metric; required.
 * @param {string} scope The scope to which the metric is bound (optional).
 * @param {number} overrideApdex A custom apdexT for this metric, in
 *                               milliseconds. Constant for a given run:
 *                               key-transaction thresholds are set at connect
 *                               time via server-side configuration.
 * @return {ApdexStats} The aggregated data for that name.
 */
Metrics.prototype.getOrCreateApdexMetric = getOrCreateApdexMetric
function getOrCreateApdexMetric(name, scope, overrideApdex) {
  if (!name) throw new Error('Metrics must be named')
  var container = this._resolve(scope)
  if (!container[name]) {
    // Only honor the override for non-global apdex metrics, and only when it
    // is a usable (positive) value. It arrives in ms; apdexT is in seconds.
    var useOverride = name !== NAMES.APDEX && overrideApdex > 0
    var apdexT = useOverride ? overrideApdex * FROM_MILLIS : this.apdexT
    container[name] = new ApdexStats(apdexT)
  }
  return container[name]
}
/**
 * Look up a metric without creating it. May create the scope's namespace as a
 * byproduct, but this function is only intended for use in testing, so that's
 * not a big deal.
 *
 * @param {string} name Metric name.
 * @param {string} scope (Optional) The scope, if any, to which the metric
 *                       belongs.
 * @return {object} Either a stats aggregate, an apdex stats aggregate, or
 *                  undefined.
 */
Metrics.prototype.getMetric = function getMetric(name, scope) {
  if (!name) throw new Error('Metrics must be named')
  var container = this._resolve(scope)
  return container[name]
}
/**
 * Serialize this collection into the nested-array representation suitable for
 * JSON.stringify and delivery to the collector. Unscoped metrics come first.
 *
 * @return {Object} Set of nested arrays containing metric information.
 */
Metrics.prototype.toJSON = function toJSON() {
  var data = this._toUnscopedData()
  return data.concat(this._toScopedData())
}
/**
 * Fold another metrics collection into this one: a transaction's metrics into
 * the agent's at harvest, or a failed harvest's metrics back into the next
 * cycle's. Among the more performance-critical paths in the agent. Existing
 * aggregates are merged; names this collection lacks are adopted by
 * reference.
 *
 * @param {Metrics} other The collection to be folded into this one.
 */
Metrics.prototype.merge = function merge(other) {
  this.started = Math.min(this.started, other.started)
  var names = Object.keys(other.unscoped)
  for (var i = 0; i < names.length; i++) {
    var name = names[i]
    if (this.unscoped[name]) {
      this.unscoped[name].merge(other.unscoped[name])
    } else {
      this.unscoped[name] = other.unscoped[name]
    }
  }
  var scopes = Object.keys(other.scoped)
  for (var j = 0; j < scopes.length; j++) {
    var scope = scopes[j]
    var scopedNames = Object.keys(other.scoped[scope])
    for (var k = 0; k < scopedNames.length; k++) {
      var scopedName = scopedNames[k]
      var stats = other.scoped[scope][scopedName]
      if (!stats) continue
      var resolved = this._resolve(scope)
      if (resolved[scopedName]) {
        resolved[scopedName].merge(stats)
      } else {
        resolved[scopedName] = stats
      }
    }
  }
}
/**
 * Return the metric namespace for a scope, creating it if it doesn't already
 * exist; without a scope, return the un-scoped namespace.
 *
 * @param {string} scope (Optional) The scope to look up.
 * @return {object} The namespace associated with the provided scope, or the
 *                  un-scoped metrics if the scope isn't set.
 */
Metrics.prototype._resolve = function _resolve(scope) {
  if (!scope) return this.unscoped
  var namespace = this.scoped[scope]
  if (!namespace) namespace = this.scoped[scope] = {}
  return namespace
}
/**
 * Map one unscoped metric to its nested-array representation, applying the
 * normalizer and any name -> ID mappings along the way. Split from
 * _getScopedData for performance.
 *
 * @param {string} name The string to look up.
 */
Metrics.prototype._getUnscopedData = function _getUnscopedData(name) {
  var stats = this.unscoped[name]
  if (!stats) return
  var normalized = this.normalizer.normalize(name)
  if (normalized.ignore || !normalized.value) return
  return [this.mapper.map(normalized.value), stats]
}
/**
 * Map one scoped metric to its nested-array representation, applying the
 * normalizer and any name -> ID mappings along the way. Split from
 * _getUnscopedData for performance.
 *
 * @param {string} name The string to look up.
 */
Metrics.prototype._getScopedData = function _getScopedData(name, scope) {
  var stats = this.scoped[scope][name]
  if (!stats) return
  var normalized = this.normalizer.normalize(name)
  if (normalized.ignore || !normalized.value) return
  return [this.mapper.map(normalized.value, scope), stats]
}
/**
 * @return {object} A serializable version of the unscoped metrics. Intended
 *                  for use by toJSON.
 */
Metrics.prototype._toUnscopedData = function _toUnscopedData() {
  var metricData = []
  var names = Object.keys(this.unscoped)
  for (var i = 0; i < names.length; i++) {
    var data = this._getUnscopedData(names[i])
    if (data) metricData.push(data)
  }
  return metricData
}
/**
 * @return {object} A serializable version of the scoped metrics. Intended for
 *                  use by toJSON.
 */
Metrics.prototype._toScopedData = function _toScopedData() {
  var metricData = []
  var scopes = Object.keys(this.scoped)
  for (var i = 0; i < scopes.length; i++) {
    var scope = scopes[i]
    var names = Object.keys(this.scoped[scope])
    for (var j = 0; j < names.length; j++) {
      var data = this._getScopedData(names[j], scope)
      if (data) metricData.push(data)
    }
  }
  return metricData
}
module.exports = Metrics
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 | 1 1 1 1 1 | 'use strict'
var logger = require('../logger').child({component: 'mapper'})
/**
* To tighten up the metrics JSON, the collector will maintain a list of
* mappings from metric names (which sometimes include scopes as well) to
* numeric IDs. As the agent sends new metric names to the collector, the
* collector will return corresponding metric IDs, in the expectation that the
* agent will uses those IDs instead of the names going forward.
*
* @param {Array} raw A list of metric spec -> ID mappings represented as
* 2-element arrays: [{name : 'Metric', scope : 'Scope'}, 1]
*/
function MetricMapper(raw) {
  this.unscoped = {} // {name : id}
  this.scoped = {} // {scope : {name : id}}
  this.length = 0 // count of distinct mappings held
  // Seed with any mappings handed in at construction; load() tolerates
  // undefined/empty input, so raw is optional.
  this.load(raw)
}
/**
 * Parse the list of metric mappings returned on metric_data responses from
 * the collector. These continue to stream in as the agent runs, so keep
 * adding to the collection rather than resetting.
 *
 * https://hudson.newrelic.com/job/collector-master/javadoc/com/nr/collector/datatypes/MetricData.html
 *
 * @param {Array} raw A list of metric spec -> ID mappings represented as
 *                    2-element arrays: [{name : 'Metric', scope : 'Scope'}, 1]
 */
MetricMapper.prototype.load = function load(raw) {
  if (!raw || !raw.length) {
    logger.debug("No new metric mappings from server.")
    return
  }
  for (var i = 0; i < raw.length; i++) {
    var spec = raw[i][0]
    var id = raw[i][1]
    var bucket
    if (spec.scope) {
      if (!this.scoped[spec.scope]) this.scoped[spec.scope] = {}
      bucket = this.scoped[spec.scope]
    } else {
      bucket = this.unscoped
    }
    // Only names seen for the first time grow the mapping count.
    if (!bucket[spec.name]) this.length++
    bucket[spec.name] = id
    logger.trace("Metric spec %s has been mapped to ID %s.", spec, id)
  }
  logger.debug("Parsed %d metric ids (%d total).", raw.length, this.length)
}
/**
 * Resolve a metric name (and optional scope) to either its server-assigned
 * numeric ID or, if no ID has been received yet, a metric spec object.
 *
 * @param {String} name The metric name.
 * @param {String} scope The scope for the metric, if set.
 * @returns {object} Either a metric spec based on the parameters, or the
 *                   server-sent ID.
 */
MetricMapper.prototype.map = function map(name, scope) {
  if (scope) {
    var scopedIds = this.scoped[scope]
    if (scopedIds && scopedIds[name]) return scopedIds[name]
    return {name: name, scope: scope}
  }
  if (this.unscoped[name]) return this.unscoped[name]
  return {name: name}
}
module.exports = MetricMapper
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
// Metric-name constants. Nearly all metric names the agent emits are composed
// from these prefixes and the ACTION_DELIMITER exported below.
var NODEJS = {
  PREFIX: 'Nodejs/'
}
var ALL = 'all'
var ERRORS = {
  PREFIX: 'Errors/',
  ALL: 'Errors/' + ALL,
  WEB: 'Errors/allWeb',
  OTHER: 'Errors/allOther'
}
var EVENTS = {
  WAIT: 'Events/wait'
}
var MEMORY = {
  PHYSICAL: 'Memory/Physical',
  FREE_HEAP: 'Memory/Heap/Free',
  USED_HEAP: 'Memory/Heap/Used',
  MAX_HEAP: 'Memory/Heap/Max',
  USED_NONHEAP: 'Memory/NonHeap/Used'
}
var CPU = {
  SYSTEM_TIME: 'CPU/System Time',
  SYSTEM_UTILIZATION: 'CPU/System/Utilization',
  USER_TIME: 'CPU/User Time',
  USER_UTILIZATION: 'CPU/User/Utilization'
}
var GC = {
  PREFIX: 'GC/',
  PAUSE_TIME: 'GC/System/Pauses'
}
var VIEW = {
  PREFIX: 'View/',
  RENDER: '/Rendering'
}
var LOOP = {
  PREFIX: NODEJS.PREFIX + 'EventLoop/',
  USAGE: NODEJS.PREFIX + 'EventLoop/CPU/Usage'
}
// Datastore metric roots; the per-vendor objects below build on these.
var DB = {
  PREFIX: 'Datastore/',
  STATEMENT: 'Datastore/statement',
  OPERATION: 'Datastore/operation',
  INSTANCE: 'Datastore/instance',
  ALL: 'Datastore/' + ALL,
  WEB: 'allWeb',
  OTHER: 'allOther'
}
var EXTERNAL = {
  PREFIX: 'External/',
  ALL: 'External/' + ALL,
  WEB: 'External/allWeb',
  OTHER: 'External/allOther',
  APP: 'ExternalApp/',
  TRANSACTION: 'ExternalTransaction/'
}
var FUNCTION = {
  PREFIX: 'Function/'
}
var MIDDLEWARE = {
  PREFIX: NODEJS.PREFIX + 'Middleware/'
}
var FS = {
  PREFIX: 'Filesystem/'
}
var MEMCACHE = {
  PREFIX: 'Memcache',
  OPERATION: DB.OPERATION + '/Memcache/',
  INSTANCE: DB.INSTANCE + '/Memcache/',
  ALL: DB.PREFIX + 'Memcache/' + ALL
}
var MONGODB = {
  PREFIX: 'MongoDB',
  STATEMENT: DB.STATEMENT + '/MongoDB/',
  OPERATION: DB.OPERATION + '/MongoDB/',
  INSTANCE: DB.INSTANCE + '/MongoDB/'
}
var MYSQL = {
  PREFIX: 'MySQL',
  STATEMENT: DB.STATEMENT + '/MySQL/',
  OPERATION: DB.OPERATION + '/MySQL/',
  INSTANCE: DB.INSTANCE + '/MySQL/'
}
var REDIS = {
  PREFIX: 'Redis',
  OPERATION: DB.OPERATION + '/Redis/',
  INSTANCE: DB.INSTANCE + '/Redis/',
  ALL: DB.PREFIX + 'Redis/' + ALL
}
var POSTGRES = {
  PREFIX: 'Postgres',
  STATEMENT: DB.STATEMENT + '/Postgres/',
  OPERATION: DB.OPERATION + '/Postgres/',
  INSTANCE: DB.INSTANCE + '/Postgres/'
}
var CASSANDRA = {
  PREFIX: 'Cassandra',
  OPERATION: DB.OPERATION + '/Cassandra/',
  STATEMENT: DB.STATEMENT + '/Cassandra/',
  INSTANCE: DB.INSTANCE + '/Cassandra/',
  ALL: DB.PREFIX + 'Cassandra/' + ALL
}
var ORACLE = {
  PREFIX: 'Oracle',
  STATEMENT: DB.STATEMENT + '/Oracle/',
  OPERATION: DB.OPERATION + '/Oracle/',
  INSTANCE: DB.INSTANCE + '/Oracle/'
}
var EXPRESS = {
  PREFIX: 'Expressjs/',
  MIDDLEWARE: MIDDLEWARE.PREFIX + 'Expressjs/',
  // NOTE(review): currently identical to MIDDLEWARE above — confirm whether
  // error handlers were meant to get a distinct prefix.
  ERROR_HANDLER: MIDDLEWARE.PREFIX + 'Expressjs/'
}
var RESTIFY = {
  PREFIX: 'Restify/'
}
var HAPI = {
  PREFIX: 'Hapi/'
}
var SUPPORTABILITY = {
  PREFIX: 'Supportability/',
  UNINSTRUMENTED: 'Supportability/Uninstrumented',
  EVENTS: 'Supportability/Events',
  API: 'Supportability/API',
  UTILIZATION: 'Supportability/utilization',
  DEPENDENCIES: 'Supportability/InstalledDependencies'
}
var UTILIZATION = {
  AWS_ERROR: SUPPORTABILITY.UTILIZATION + '/aws/error',
  DOCKER_ERROR: SUPPORTABILITY.UTILIZATION + '/docker/error'
}
var CUSTOM_EVENTS = {
  PREFIX: SUPPORTABILITY.EVENTS + '/Customer/',
  DROPPED: SUPPORTABILITY.EVENTS + '/Customer/Dropped',
  SEEN: SUPPORTABILITY.EVENTS + '/Customer/Seen',
  SENT: SUPPORTABILITY.EVENTS + '/Customer/Sent',
  TOO_LARGE: SUPPORTABILITY.EVENTS + '/Customer/TooLarge',
  FAILED: SUPPORTABILITY.EVENTS + '/Customer/FailedToSend'
}
var TRANSACTION_ERROR = {
  SEEN: SUPPORTABILITY.EVENTS + '/TransactionError/Seen',
  SENT: SUPPORTABILITY.EVENTS + '/TransactionError/Sent'
}
var WEB = {
  RESPONSE_TIME: 'WebTransaction',
  TOTAL_TIME: 'WebTransactionTotalTime'
}
var BACKGROUND = {
  RESPONSE_TIME: 'OtherTransaction',
  TOTAL_TIME: 'OtherTransactionTotalTime'
}
var TRUNCATED = {
  PREFIX: 'Truncated/'
}
// Everything is exported flat; simple names live directly on the export.
module.exports = {
  ACTION_DELIMITER: '/',
  ALL: ALL,
  APDEX: 'Apdex',
  BACKGROUND: BACKGROUND,
  CASSANDRA: CASSANDRA,
  CLIENT_APPLICATION: 'ClientApplication',
  CONTROLLER: 'Controller',
  CPU: CPU,
  GC: GC,
  CUSTOM: 'Custom',
  CUSTOM_EVENTS: CUSTOM_EVENTS,
  DB: DB,
  ERRORS: ERRORS,
  EVENTS: EVENTS,
  EXPRESS: EXPRESS,
  EXTERNAL: EXTERNAL,
  FS: FS,
  FUNCTION: FUNCTION,
  HAPI: HAPI,
  HTTP: 'HttpDispatcher',
  LOOP: LOOP,
  MEMCACHE: MEMCACHE,
  MEMORY: MEMORY,
  MONGODB: MONGODB,
  MYSQL: MYSQL,
  NORMALIZED: 'NormalizedUri',
  NODEJS: NODEJS,
  ORACLE: ORACLE,
  POSTGRES: POSTGRES,
  QUEUETIME: 'WebFrontend/QueueTime',
  REDIS: REDIS,
  RESTIFY: RESTIFY,
  SUPPORTABILITY: SUPPORTABILITY,
  TRANSACTION_ERROR: TRANSACTION_ERROR,
  TRUNCATED: TRUNCATED,
  URI: 'Uri',
  UTILIZATION: UTILIZATION,
  VIEW: VIEW,
  WEB: WEB
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var EventEmitter = require('events').EventEmitter
var util = require('util')
var arrUtil = require('../util/arrays')
var logger = require('../logger').child({component: 'metric_normalizer'})
var deepEqual = require('../util/deep-equal')
var Rule = require('./normalizer/rule')
var NAMES = require('../metrics/names.js')
// Formatter for URL normalizers: normalized names get the NormalizedUri
// prefix; unmatched paths either fall to the backstop ('/*') when configured,
// or pass through under the raw-Uri prefix.
function url(normalized, path, config) {
  if (normalized) return NAMES.NORMALIZED + normalized
  if (config.enforce_backstop) return NAMES.NORMALIZED + '/*'
  return NAMES.URI + path
}
// Formatter for non-URL normalizers: the normalized value when a rule
// matched, otherwise the original path unchanged.
function plain(normalized, path) {
  return normalized ? normalized : path
}
/**
 * The collector keeps track of rules that should be applied to metric names,
 * and sends these rules to the agent at connection time. These rules can
 * either change the name of the metric or indicate that metrics associated
 * with this name (which is generally a URL path) should be ignored
 * altogether.
 *
 * @param {object} config The agent's configuration blob, which has a
 *                        parameter that indicates whether to enforce the
 *                        normalization backstop.
 * @param {string} type Kind of names this normalizer handles; 'URL' selects
 *                      the URL-specific result formatter.
 */
function MetricNormalizer(config, type) {
  if (!config) throw new Error("normalizer must be created with configuration.")
  if (!type) throw new Error("normalizer must be created with a type.")
  EventEmitter.call(this)
  this.config = config
  this.type = type
  // some mildly cheesy polymorphism to make normalizers work generically
  this.formatter = type === 'URL' ? url : plain
  this.rules = []
}
util.inherits(MetricNormalizer, EventEmitter)
// -------------------------------------------------------------------------- //
/**
* @typedef {Object} NormalizationResults
*
* @property {bool} matched - True if a rule was found that matched.
* @property {bool} ignore - True if the given input should be ignored.
* @property {string} value - The normalized input value.
*/
// -------------------------------------------------------------------------- //
/**
 * Convert the raw, de-serialized JSON response into a set of
 * NormalizationRules, replacing any rules currently held.
 *
 * @param object json The de-serialized JSON response sent on collector
 *                    connection.
 */
MetricNormalizer.prototype.load = function load(json) {
  if (!json) return
  this.rules = []
  logger.debug("Received %s %s normalization rule(s) from the server",
    json.length, this.type)
  json.forEach(function cb_forEach(ruleJSON) {
    var rule = new Rule(ruleJSON)
    // no need to add the same rule twice
    if (arrUtil.find(this.rules, deepEqual.bind(null, rule))) return
    this.rules.push(rule)
    logger.trace("Loaded %s normalization rule: %s", this.type, rule)
  }, this)
  /* I (FLN) always forget this, so making a note: JS sort is always
   * IN-PLACE, even though it returns the sorted array.
   */
  this.rules.sort(function cb_sort(a, b) {
    return a.precedence - b.precedence
  })
  logger.debug("Loaded %s %s normalization rule(s).",
    this.rules.length, this.type)
}
/**
* Load any rules found in the configuration into a metric normalizer.
*
* Operates via side effects.
*/
MetricNormalizer.prototype.loadFromConfig = function loadFromConfig() {
  var rules = this.config.rules
  // Naming rules: each needs both a pattern and a replacement name.
  if (rules && rules.name && rules.name.length > 0) {
    rules.name.forEach(function cb_forEach(rule) {
      if (!rule.pattern) {
        return logger.error(
          {rule: rule},
          "Simple naming rules require a pattern."
        )
      }
      if (!rule.name) {
        return logger.error(
          {rule: rule},
          "Simple naming rules require a replacement name."
        )
      }
      var precedence = rule.precedence
      var terminal = rule.terminate_chain
      // Translate the config-style rule into the collector's JSON shape,
      // filling in defaults for fields the config may omit.
      var json = {
        match_expression: rule.pattern,
        eval_order: (typeof precedence === 'number') ? precedence : 500,
        terminate_chain: (typeof terminal === 'boolean') ? terminal : true,
        replace_all: rule.replace_all,
        replacement: rule.name,
        ignore: false
      }
      // Find where the rule should be inserted and do so. With the
      // reverse_naming_rules feature flag, a rule of equal precedence goes
      // BEFORE existing rules instead of after them.
      var reverse = this.config.feature_flag.reverse_naming_rules
      var insert = arrUtil.findIndex(this.rules, function findRule(r) {
        return reverse
          ? r.precedence >= json.eval_order
          : r.precedence > json.eval_order
      })
      if (insert === -1) {
        this.rules.push(new Rule(json))
      } else {
        this.rules.splice(insert, 0, new Rule(json))
      }
    }, this)
  }
  // Ignore rules: a bare pattern handed to addSimple becomes an ignore rule.
  if (rules && rules.ignore && rules.ignore.length > 0) {
    rules.ignore.forEach(function cb_forEach(pattern) {
      this.addSimple(pattern)
    }, this)
  }
}
/**
 * Prepend a simple, user-provided rule to the match list. These rules are
 * always highest precedence, always terminate matching, and always apply to
 * the URL as a whole. If no name is provided, then transactions attached to
 * the matching URLs will be ignored.
 *
 * - `addSimple(opts)`
 * - `addSimple(pattern [, name])`
 *
 * @param {RegExp} pattern The pattern to rename (with capture groups).
 * @param {string} [name] The name to use for the transaction.
 */
MetricNormalizer.prototype.addSimple = function addSimple(pattern, name) {
  if (!pattern) return logger.error("Simple naming rules require a pattern.")
  this.rules.unshift(new Rule({
    match_expression: pattern,
    eval_order: 0,
    terminate_chain: true,
    replace_all: false,
    replacement: name || null,
    ignore: !name
  }))
}
/**
* Turn a (scrubbed) URL path into partial metric name.
*
* @param {string} path - The URL path to turn into a name.
*
* @returns {NormalizationResults} - The results of normalization.
*/
MetricNormalizer.prototype.normalize = function normalize(path) {
  var last = path
  var length = this.rules.length
  var normalized
  var matched = false
  var ignored = false
  // Apply each of our rules in turn. Rules are kept sorted by precedence,
  // and each non-ignore match feeds its output into the next rule.
  for (var i = 0; i < length; i++) {
    var rule = this.rules[i]
    var applied = rule.apply(last)
    if (!rule.matched) {
      continue
    }
    if (rule.ignore) {
      ignored = true
    } else {
      matched = true
      normalized = applied
      // emit event when a rule is matched
      // we could also include an array of matched rules in the returned map, but
      // that would increase memory overhead by creating additional array
      this.emit('appliedRule', rule, normalized, last)
      logger.trace({rule: rule, type: this.type},
        "Normalized %s to %s.", last, normalized)
      last = normalized
    }
    // Terminal rules stop the chain once they match (ignore rules included).
    if (rule.isTerminal) {
      logger.trace({rule: rule}, "Terminating normalization.")
      break
    }
  }
  // Return the normalized path, run through the type-specific formatter.
  return {
    matched: matched,
    ignore: ignored,
    value: this.formatter(normalized, path, this.config)
  }
}
module.exports = MetricNormalizer
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| rule.js | 14.29% | (10 / 70) | 0% | (0 / 52) | 0% | (0 / 9) | 14.49% | (10 / 69) | |
| tx_segment.js | 11.54% | (6 / 52) | 0% | (0 / 26) | 0% | (0 / 4) | 12.5% | (6 / 48) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 | 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../../logger').child({component: 'normalizer_rule'})
/**
 * Collector rules express backreferences with backslashes ('\1'), but
 * JavaScript's String#replace wants dollar signs ('$1'), so swap them.
 */
var replaceReplacer = function replaceReplacer(input) {
  return input.split('\\').join('$')
}
/**
 * Be liberal about accepting incomplete information, because we don't want
 * bad rules from the collector to crash client apps. Otherwise, this is a
 * fairly straightforward mapping of the concepts in metric normalization
 * rules into an object form.
 *
 * @param {Object} json A JavaScript object literal parsed out from the JSON
 *                      from the collector.
 */
function NormalizerRule(json) {
  if (!json) {
    logger.debug(
      "Received incompletely specified metric normalization rule from collector."
    )
    json = {}
  }
  this.eachSegment = json.each_segment || false
  this.precedence = json.eval_order || 0
  this.isTerminal = json.terminate_chain || false
  this.replacement = replaceReplacer(json.replacement || '$0')
  this.replaceAll = json.replace_all || false
  this.ignore = json.ignore || false
  this.matched = false
  // Always case-insensitive; global only when the rule replaces all matches.
  var modifiers = this.replaceAll ? 'ig' : 'i'
  // don't allow pattern compilation to fail — fall back to a never-matching
  // pattern instead
  if (json.match_expression instanceof RegExp) {
    this.pattern = _addRegExpFlags(json.match_expression, modifiers)
  } else {
    try {
      this.pattern = new RegExp(json.match_expression || '^$', modifiers)
    } catch (error) {
      logger.warn(error, "Problem compiling metric normalization rule pattern.")
      this.pattern = /^$/
    }
  }
}
/**
 * Allow the higher-level functions to operate on input uniformly: per-segment
 * rules see the path split on '/', whole-path rules see a single-element
 * array.
 *
 * @param {string} input URL to potentially be split.
 */
NormalizerRule.prototype.getSegments = function getSegments(input) {
  return this.eachSegment ? input.split('/') : [input]
}
/**
 * Check if a URL matches a rule.
 *
 * Does not set {NormalizerRule#matched}.
 *
 * @param {string} input - URL to match.
 *
 * @return {bool} - True if this rule matches the given input, otherwise false.
 */
NormalizerRule.prototype.matches = function matches(input) {
  var segments = this.getSegments(input)
  for (var i = 0; i < segments.length; ++i) {
    // Patterns built for replace_all rules carry the 'g' flag, and test()
    // advances a global pattern's lastIndex between calls (see the note on
    // apply() below). Reset it so stale state can't cause false negatives.
    this.pattern.lastIndex = 0
    if (this.pattern.test(segments[i])) {
      return true
    }
  }
  return false
}
/**
* Apply the substitutions, if any, to the input.
*
* Also sets {NormalizerRule#matched} to true if this rule did match the given
* input.
*
* String.split will return empty segments when the path has a leading slash or
* contains a run of slashes. Don't inadvertently substitute or drop these empty
* segments, or the normalized path will be wrong.
*
* XXX In Node v0.8 and Node v0.10, `RegExp#test` advances internal state and
* XXX tracks where it left off from the previous match. This has the side
* XXX effect that reusing the same object may cause false negatives if you do
* XXX not reset that state. The only way to reset the state is to set
* XXX `RegExp#lastIndex` to `0`.
*
* @param {string} input - URL to normalize.
*
* @return {string?} - The normalized url, or `null` if this is an ignore rule
* that matched this url.
*/
NormalizerRule.prototype.apply = function apply(input) {
  // For ignore rules, just see if we match and return either `null` or the
  // original input.
  if (this.ignore) {
    return (this.matched = this.matches(input)) ? null : input
  }
  this.matched = false
  var result = this.getSegments(input)
    .map(function applyMap(segment) {
      // Discussion of why we use `lastIndex` in function documentation to
      // prevent de-opt due to long function.
      this.pattern.lastIndex = 0
      if (segment && this.pattern.test(segment)) {
        this.matched = true
        return segment.replace(this.pattern, this.replacement)
      }
      // Empty segments (leading slash or runs of slashes) pass through
      // untouched so the joined path keeps its shape.
      return segment
    }, this)
    .join('/')
  // Re-prepend a leading slash if the substitution dropped it.
  return input[0] === '/' && result[0] !== '/' ? '/' + result : result
}
/**
 * Loggable/serializable snapshot of this rule's configuration.
 */
NormalizerRule.prototype.toJSON = function toJSON() {
  var json = {}
  json.eachSegment = this.eachSegment
  json.precedence = this.precedence
  json.isTerminal = this.isTerminal
  json.replacement = this.replacement
  json.replaceAll = this.replaceAll
  json.ignore = this.ignore
  json.pattern = this.pattern.source
  return json
}
/**
 * Merges the given flags with those already in a regular expression.
 *
 * @param {RegExp} re - The regular expression to add flags to.
 * @param {string} flags - The flags to add to the regex.
 *
 * @return {RegExp} - A regular expression with all the given flags added.
 */
function _addRegExpFlags(re, flags) {
  var merged = _getRegExpFlags(re)
  var changed = false
  for (var i = 0; i < flags.length; ++i) {
    if (merged.indexOf(flags[i]) !== -1) continue
    changed = true
    merged += flags[i]
  }
  // Only build a fresh RegExp when a flag was actually missing.
  return changed ? new RegExp(re.source, merged) : re
}
/**
 * Pulls all the flags for a regular expression.
 *
 * @param {RegExp} re - The regular expression to extract the flags of.
 *
 * @return {string} - The regex flags.
 */
function _getRegExpFlags(re) {
  // Node >6 exposes the flags directly.
  if ('flags' in re) {
    return re.flags
  }
  // Older runtimes: reassemble from the individual boolean properties.
  // Remove this logic once we've deprecated Node <=4, so in 2030.
  var pairs = [
    ['global', 'g'],
    ['ignoreCase', 'i'],
    ['multiline', 'm'],
    ['sticky', 'y'],
    ['unicode', 'u']
  ]
  var flags = ''
  for (var i = 0; i < pairs.length; ++i) {
    if (re[pairs[i][0]]) flags += pairs[i][1]
  }
  return flags
}
module.exports = NormalizerRule
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 | 1 1 1 1 1 1 | 'use strict'
var logger = require('../../logger').child({component: 'tx_segment_normalizer'})
module.exports = TxSegmentNormalizer
function TxSegmentNormalizer() {
  // Whitelist rules from the collector's transaction_segment_terms response;
  // populated via load() below.
  this.terms = []
}
/**
* This normalize method is wicked. The best bet is to read the spec:
* https://newrelic.atlassian.net/wiki/pages/viewpage.action?spaceKey=eng&title=Language+agent+transaction+segment+terms+rules
*
* A copy paste of the rules that were followed:
* 1. Find the first rule where the prefix key matches the prefix of the
* transaction name. If no matching rules are found, abort.
* 2. Strip the prefix from the transaction name.
* 3. Split the rest of the transaction name into segments on slashes ('/').
* 4. For each segment:
* If the segment appears in the array of strings given under the terms key,
* keep it unchanged. Else, replace it with a placeholder ('*')
* 5. Collapse all adjacent placeholder segments into a single '*' segment.
* 6. Join together the modified segments with slashes, and re-prepend the prefix.
*
* @param {string} path - The transaction metric path to normalize.
*
* @return {NormalizationResults} - The results of normalizing the given path.
*/
TxSegmentNormalizer.prototype.normalize = function normalize(path) {
  var currentTerm
  var prefix
  // First rule whose prefix matches wins (spec step 1).
  for (var i = 0; i < this.terms.length; i++) {
    currentTerm = this.terms[i]
    prefix = currentTerm.prefix
    // lastIndexOf(prefix, 0) is 0 when path starts with prefix, -1 otherwise.
    if (path.lastIndexOf(prefix, 0) === -1) {
      continue
    }
    // Steps 2-3: strip the prefix and split the remainder into segments.
    var fragment = path.slice(prefix.length)
    var parts = fragment.split('/')
    var result = []
    var prev
    var segment
    for (var j = 0; j < parts.length; j++) {
      segment = parts[j]
      // A single trailing empty segment means the path ended in '/'; drop it.
      if (segment === '' && j + 1 === parts.length) break
      if (currentTerm.terms.indexOf(segment) === -1) {
        // Steps 4-5: non-whitelisted segments become '*', and adjacent
        // placeholders collapse into one.
        if (prev === '*') continue
        result.push(prev = '*')
      } else {
        result.push(prev = segment)
      }
    }
    logger.trace('Normalizing %s because of rule: %s', path, currentTerm)
    // Step 6: rejoin and re-prepend the prefix.
    return {
      matched: true, // To match MetricNormalizer
      ignore: false, // ^^
      value: prefix + result.join('/')
    }
  }
  // No rule matched; hand the path back unchanged.
  return {
    matched: false, // To match MetricNormalizer
    ignore: false, // ^^
    value: path
  }
}
/**
 * Replaces the current rule set with segment term rules from the collector.
 * Non-array input is rejected with a warning and leaves the rules untouched.
 *
 * @param {Array} json - The transaction_segment_terms rules.
 */
TxSegmentNormalizer.prototype.load = function load(json) {
  if (!Array.isArray(json)) {
    logger.warn(
      'transaction_segment_terms was not an array got: %s (%s)',
      typeof json,
      json
    )
    return
  }
  this.terms = filterRules(json)
}
/**
 * Filters out invalid segment term rules and de-duplicates them by prefix.
 *
 * A rule is kept only when:
 *  - its `prefix` is a non-empty string (a trailing '/' is appended when
 *    missing, mutating the rule in place),
 *  - the prefix consists of exactly two non-empty segments
 *    (e.g. 'WebTransaction/Custom/'), and
 *  - its `terms` property is an array.
 *
 * When multiple rules share a prefix, the last one wins.
 *
 * @param {Array} rules - Candidate rules, typically from the collector.
 *
 * @return {Array} The valid rules, in first-seen prefix order.
 */
function filterRules(rules) {
  var map = {}
  for (var i = 0, l = rules.length; i < l; ++i) {
    var prefix = rules[i].prefix
    if (!prefix || typeof prefix !== 'string') continue
    if (prefix[prefix.length - 1] !== '/') {
      prefix = prefix + '/'
      rules[i].prefix = prefix
    }

    // A valid prefix has exactly two non-empty segments plus the trailing
    // slash: 'WebTransaction/Custom/' -> ['WebTransaction', 'Custom', ''].
    // (The original check also tested `segments[3]`, which can never be
    // truthy when `segments.length === 3`; that dead condition is dropped.)
    var segments = prefix.split('/')
    if (segments.length !== 3 || !segments[0] || !segments[1]) continue

    if (Array.isArray(rules[i].terms)) {
      map[prefix] = rules[i]
    }
  }

  var keys = Object.keys(map)
  var filtered = new Array(keys.length)
  for (i = 0, l = keys.length; i < l; ++i) {
    filtered[i] = map[keys[i]]
  }
  return filtered
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| cassandra.js | 29.41% | (5 / 17) | 0% | (0 / 4) | 0% | (0 / 1) | 31.25% | (5 / 16) | |
| custom.js | 30% | (3 / 10) | 0% | (0 / 2) | 0% | (0 / 1) | 33.33% | (3 / 9) | |
| express.js | 22.22% | (2 / 9) | 0% | (0 / 2) | 0% | (0 / 1) | 25% | (2 / 8) | |
| generic.js | 25% | (2 / 8) | 0% | (0 / 2) | 0% | (0 / 1) | 28.57% | (2 / 7) | |
| http.js | 15.38% | (4 / 26) | 0% | (0 / 12) | 0% | (0 / 1) | 16.67% | (4 / 24) | |
| http_external.js | 13.04% | (3 / 23) | 0% | (0 / 12) | 0% | (0 / 2) | 13.04% | (3 / 23) | |
| memcached.js | 25% | (5 / 20) | 0% | (0 / 8) | 0% | (0 / 1) | 26.32% | (5 / 19) | |
| other.js | 18.75% | (3 / 16) | 0% | (0 / 6) | 0% | (0 / 1) | 20% | (3 / 15) | |
| redis.js | 25% | (5 / 20) | 0% | (0 / 8) | 0% | (0 / 1) | 26.32% | (5 / 19) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 | 1 1 1 1 1 | 'use strict'
var NAMES = require('../names')
var DB = NAMES.DB
var CASSANDRA = NAMES.CASSANDRA
/**
 * Records response-time and rollup metrics for a Cassandra operation segment.
 *
 * @param {TraceSegment} segment - The finished Cassandra segment.
 * @param {string} [scope] - Scoping transaction name, if any.
 */
function record(segment, scope) {
  var transaction = segment.transaction
  var duration = segment.getDurationInMillis()
  var exclusive = segment.getExclusiveDurationInMillis()
  var rollupSuffix = transaction.isWeb() ? DB.WEB : DB.OTHER
  var operation = segment.name

  // Scoped metric first, then the unscoped operation and rollups.
  if (scope) transaction.measure(operation, scope, duration, exclusive)

  transaction.measure(operation, null, duration, exclusive)
  transaction.measure(DB.PREFIX + rollupSuffix, null, duration, exclusive)
  transaction.measure(DB.ALL, null, duration, exclusive)
  transaction.measure(
    DB.PREFIX + CASSANDRA.PREFIX + '/' + rollupSuffix,
    null,
    duration,
    exclusive
  )
  transaction.measure(CASSANDRA.ALL, null, duration, exclusive)
}

// disabled until metric explosions can be handled by server
/*
if (segment.port > 0) {
  var hostname = segment.host || 'localhost'
  var location = hostname + ':' + segment.port
  var instance = DB.INSTANCE + '/' + CASSANDRA.PREFIX + '/' + location
  transaction.measure(instance, null, duration, exclusive)
}
*/

module.exports = record
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 | 1 1 1 | 'use strict'
var NAMES = require('../names')
/**
 * Records response-time metrics for a custom (user-defined) segment under
 * the Custom name prefix.
 *
 * @param {TraceSegment} segment - The finished custom segment.
 * @param {string} [scope] - Scoping transaction name, if any.
 */
function record(segment, scope) {
  var transaction = segment.transaction
  var metricName = NAMES.CUSTOM + NAMES.ACTION_DELIMITER + segment.name
  var duration = segment.getDurationInMillis()
  var exclusive = segment.getExclusiveDurationInMillis()

  if (scope) transaction.measure(metricName, scope, duration, exclusive)
  transaction.measure(metricName, null, duration, exclusive)
}

module.exports = record
|
'use strict'

/**
 * Records response-time metrics for an Express middleware/router segment,
 * named `<segment name>/<path>`.
 *
 * @param {string} path - The mounted path of the middleware or route.
 * @param {TraceSegment} segment - The finished segment.
 * @param {string} [scope] - Scoping transaction name, if any.
 */
function record(path, segment, scope) {
  var transaction = segment.transaction
  var metricName = segment.name + '/' + path
  var duration = segment.getDurationInMillis()
  var exclusive = segment.getExclusiveDurationInMillis()

  if (scope) transaction.measure(metricName, scope, duration, exclusive)
  transaction.measure(metricName, null, duration, exclusive)
}

module.exports = record
'use strict'

/**
 * Generic recorder: records scoped and unscoped response-time metrics under
 * the segment's own name.
 *
 * @param {TraceSegment} segment - The finished segment.
 * @param {string} [scope] - Scoping transaction name, if any.
 */
function record(segment, scope) {
  var transaction = segment.transaction
  var duration = segment.getDurationInMillis()
  var exclusive = segment.getExclusiveDurationInMillis()

  if (scope) transaction.measure(segment.name, scope, duration, exclusive)
  transaction.measure(segment.name, null, duration, exclusive)
}

module.exports = record
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 | 1 1 1 1 | 'use strict'
var NAMES = require('../../metrics/names.js')
// CONSTANTS
var TO_MILLIS = 1e3
/**
 * Records response-time, total-time, queue-time, CAT, and Apdex metrics for
 * a web transaction segment.
 *
 * @param {TraceSegment} segment - The finished web segment.
 * @param {string} scope - Scoping transaction name; required for web metrics.
 */
function recordWeb(segment, scope) {
  // In web metrics, scope is required.
  if (!scope) return

  var transaction = segment.transaction

  // If there was a nested webTransaction, defer to its recorder.
  if (transaction.webSegment && segment !== transaction.webSegment) return

  var duration = segment.getDurationInMillis()
  var totalTime = transaction.trace.getTotalTimeDurationInMillis()
  var exclusive = segment.getExclusiveDurationInMillis()
  var partial = segment.partialName
  var config = segment.transaction.agent.config

  // Named / key transaction support requires a per-name apdexT.
  var keyApdexInMillis = config.web_transactions_apdex[scope] * TO_MILLIS || 0

  // Response-time and total-time rollups, all sharing this segment's
  // exclusive time. Order matters only for readability; each is unscoped.
  var rollups = [
    [NAMES.WEB.RESPONSE_TIME, duration],
    [NAMES.WEB.TOTAL_TIME, totalTime],
    [NAMES.HTTP, duration],
    [scope, duration],
    [NAMES.WEB.TOTAL_TIME + '/' + partial, totalTime]
  ]
  for (var i = 0; i < rollups.length; ++i) {
    transaction.measure(rollups[i][0], null, rollups[i][1], exclusive)
  }

  if (transaction.queueTime > 0) {
    transaction.measure(NAMES.QUEUETIME, null, transaction.queueTime)
  }

  if (transaction.incomingCatId) {
    transaction.measure(
      NAMES.CLIENT_APPLICATION + '/' + transaction.incomingCatId + '/all',
      null,
      transaction.catResponseTime
    )
  }

  transaction._setApdex(NAMES.APDEX + '/' + partial, duration, keyApdexInMillis)
  transaction._setApdex(NAMES.APDEX, duration, keyApdexInMillis)
}

module.exports = recordWeb
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 | 1 1 1 | 'use strict'
var EXTERNAL = require('../../metrics/names').EXTERNAL
/**
 * Builds a recorder for external (outbound) request segments against a given
 * host and client library.
 *
 * @param {string} host - Host the external request was made to. Required.
 * @param {string} library - Name of the client library used.
 *
 * @return {function} Recorder taking (segment, scope).
 *
 * @throws {Error} When no host is provided.
 */
function recordExternal(host, library) {
  if (!host) {
    throw new Error(
      'External request metrics need to be associated with a host. ' +
      'Not measuring.'
    )
  }

  return function cls_recordExternal(segment, scope) {
    var transaction = segment.transaction
    var duration = segment.getDurationInMillis()
    var exclusive = segment.getExclusiveDurationInMillis()
    var metricName = EXTERNAL.PREFIX + host + '/' + library
    var rollupType = transaction.isWeb() ? EXTERNAL.WEB : EXTERNAL.OTHER
    var rollupHost = EXTERNAL.PREFIX + host + '/all'

    if (segment.catId && segment.catTransaction) {
      var catName = EXTERNAL.TRANSACTION + host + '/' + segment.catId +
        '/' + segment.catTransaction

      transaction.measure(
        EXTERNAL.APP + host + '/' + segment.catId + '/all',
        null,
        duration,
        exclusive
      )
      transaction.measure(catName, null, duration, exclusive)

      // This CAT metric replaces scoped External/{host}/{method}
      if (scope) transaction.measure(catName, scope, duration, exclusive)
    } else if (scope) {
      transaction.measure(metricName, scope, duration, exclusive)
    }

    transaction.measure(metricName, null, duration, exclusive)
    transaction.measure(rollupType, null, duration, exclusive)
    transaction.measure(rollupHost, null, duration, exclusive)
    transaction.measure(EXTERNAL.ALL, null, duration, exclusive)
  }
}

module.exports = recordExternal
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 | 1 1 1 1 1 | 'use strict'
var NAMES = require('../names')
var DB = NAMES.DB
var MEMCACHE = NAMES.MEMCACHE
/**
 * Records response-time, rollup, and datastore-instance metrics for a
 * memcached operation segment.
 *
 * @param {TraceSegment} segment - The finished memcached segment.
 * @param {string} [scope] - Scoping transaction name, if any.
 */
function recordMemcache(segment, scope) {
  var transaction = segment.transaction
  var duration = segment.getDurationInMillis()
  var exclusive = segment.getExclusiveDurationInMillis()
  var rollupSuffix = transaction.isWeb() ? DB.WEB : DB.OTHER
  var operation = segment.name

  if (scope) transaction.measure(operation, scope, duration, exclusive)

  transaction.measure(operation, null, duration, exclusive)
  transaction.measure(DB.PREFIX + rollupSuffix, null, duration, exclusive)
  transaction.measure(
    DB.PREFIX + MEMCACHE.PREFIX + '/' + rollupSuffix,
    null,
    duration,
    exclusive
  )
  transaction.measure(DB.ALL, null, duration, exclusive)
  transaction.measure(MEMCACHE.ALL, null, duration, exclusive)

  // Datastore instance metrics, only when host and port/path/id are known.
  var params = segment.parameters
  if (params.hasOwnProperty('host') && params.hasOwnProperty('port_path_or_id')) {
    var instanceName = DB.INSTANCE + '/' + MEMCACHE.PREFIX + '/' +
      params.host + '/' + params.port_path_or_id
    transaction.measure(instanceName, null, duration, exclusive)
  }
}

// disabled until metric explosions can be handled by server
/*
if (segment.port > 0) {
  var hostname = segment.host || 'localhost'
  var location = hostname + ':' + segment.port
  var instance = DB.INSTANCE + '/' + MEMCACHE.PREFIX + '/' + location
  transaction.measure(instance, null, duration, exclusive)
}
*/

module.exports = recordMemcache
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 | 1 1 1 | 'use strict'
var NAMES = require('../../metrics/names.js')
/**
 * Records response-time and total-time metrics for a background (non-web)
 * transaction segment.
 *
 * @param {TraceSegment} segment - The finished background segment.
 * @param {string} [scope] - Scoping transaction name, if any.
 */
function recordBackground(segment, scope) {
  var transaction = segment.transaction

  // If there was a nested otherTransaction, defer to its recorder.
  if (transaction.bgSegment && segment !== transaction.bgSegment) return

  var duration = segment.getDurationInMillis()
  var exclusive = segment.getExclusiveDurationInMillis()
  var totalTime = segment.transaction.trace.getTotalTimeDurationInMillis()
  var name = segment.partialName + '/' + segment.name

  if (scope) {
    transaction.measure(scope, null, duration, exclusive)
    transaction.measure(
      NAMES.BACKGROUND.TOTAL_TIME + '/' + name,
      null,
      totalTime,
      exclusive
    )
  }

  // The background total-time rollup lacks the `/all` suffix that the
  // response-time rollup carries.
  transaction.measure(
    NAMES.BACKGROUND.RESPONSE_TIME + '/all',
    null,
    duration,
    exclusive
  )
  transaction.measure(NAMES.BACKGROUND.TOTAL_TIME, null, totalTime, exclusive)
}

module.exports = recordBackground
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 | 1 1 1 1 1 | 'use strict'
var NAMES = require('../names')
var DB = NAMES.DB
var REDIS = NAMES.REDIS
/**
 * Records response-time, rollup, and datastore-instance metrics for a Redis
 * operation segment.
 *
 * @param {TraceSegment} segment - The finished Redis segment.
 * @param {string} [scope] - Scoping transaction name, if any.
 */
function recordRedis(segment, scope) {
  var transaction = segment.transaction
  var duration = segment.getDurationInMillis()
  var exclusive = segment.getExclusiveDurationInMillis()
  var rollupSuffix = transaction.isWeb() ? DB.WEB : DB.OTHER
  var operation = segment.name

  if (scope) transaction.measure(operation, scope, duration, exclusive)

  transaction.measure(operation, null, duration, exclusive)
  transaction.measure(DB.PREFIX + rollupSuffix, null, duration, exclusive)
  transaction.measure(
    DB.PREFIX + REDIS.PREFIX + '/' + rollupSuffix,
    null,
    duration,
    exclusive
  )
  transaction.measure(DB.ALL, null, duration, exclusive)
  transaction.measure(REDIS.ALL, null, duration, exclusive)

  // Datastore instance metrics, only when host and port/path/id are known.
  var params = segment.parameters
  if (params.hasOwnProperty('host') && params.hasOwnProperty('port_path_or_id')) {
    var instanceName = DB.INSTANCE + '/' + REDIS.PREFIX + '/' +
      params.host + '/' + params.port_path_or_id
    transaction.measure(instanceName, null, duration, exclusive)
  }
}

// disabled until metric explosions can be handled by server
/*
if (segment.port > 0) {
  var hostname = segment.host || 'localhost'
  var location = hostname + ':' + segment.port
  var instance = DB.INSTANCE + '/' + REDIS.PREFIX + '/' + location
  transaction.measure(instance, null, duration, exclusive)
}
*/

module.exports = recordRedis
|
'use strict'

/*
 *
 * CONSTANTS
 *
 */
var FROM_MILLIS = 1e-3

/**
 * Tracks Apdex bucket counts (satisfying / tolerating / frustrating) against
 * a configured Apdex threshold.
 *
 * @param {number} apdexT - Apdex tolerating threshold, in seconds. Required;
 *                          zero is explicitly allowed.
 */
function ApdexStats(apdexT) {
  if (!apdexT && apdexT !== 0) {
    throw new Error('Apdex summary must be created with apdexT.')
  }
  this.apdexT = apdexT

  this.satisfying = 0
  this.tolerating = 0
  this.frustrating = 0
}

/**
 * Buckets a single measurement: satisfying up to apdexT, tolerating up to
 * 4 * apdexT, frustrating beyond that.
 *
 * @param {number} time - Measured duration, in seconds.
 * @param {number} [overrideApdex] - Per-name apdexT, in seconds; any falsy
 *                                   value falls back to the instance apdexT.
 */
ApdexStats.prototype.recordValue = function recordValue(time, overrideApdex) {
  var threshold = overrideApdex || this.apdexT
  if (time <= threshold) {
    ++this.satisfying
  } else if (time <= 4 * threshold) {
    ++this.tolerating
  } else {
    ++this.frustrating
  }
}

/**
 * Buckets a measurement expressed in milliseconds. When overrideApdex is
 * undefined the multiplication yields NaN, which recordValue treats as
 * "no override".
 */
ApdexStats.prototype.recordValueInMillis =
  function recordValueInMillis(timeInMillis, overrideApdex) {
    this.recordValue(timeInMillis * FROM_MILLIS, overrideApdex * FROM_MILLIS)
  }

/**
 * Used by the error handler to indicate that a user was frustrated by a page
 * error.
 */
ApdexStats.prototype.incrementFrustrating = function incrementFrustrating() {
  ++this.frustrating
}

/**
 * Folds another summary's bucket counts into this one. The other summary's
 * apdexT is intentionally not brought along for the ride.
 *
 * @param {ApdexStats} other The existing apdex stats being merged in.
 */
ApdexStats.prototype.merge = function merge(other) {
  this.satisfying += other.satisfying
  this.tolerating += other.tolerating
  this.frustrating += other.frustrating
}

/**
 * ApdexStats override the ordinary statistics serialization format: the
 * three bucket counts come first, then the apdex threshold twice (used by
 * calculations inside RPM), followed by 0.
 *
 * @returns {Array} A six-value array where only the first three values are
 *                  significant: satisfying, tolerating, and frustrating
 *                  load times, respectively.
 */
ApdexStats.prototype.toJSON = function toJSON() {
  return [
    this.satisfying,
    this.tolerating,
    this.frustrating,
    this.apdexT,
    this.apdexT,
    0
  ]
}

module.exports = ApdexStats
'use strict'

/*
 *
 * CONSTANTS
 *
 */
var BYTES_PER_MB = 1024 * 1024
var FROM_MILLIS = 1e-3

/**
 * Simple container for tracking running statistics for a metric.
 */
function Stats() {
  this.total = 0
  this.totalExclusive = 0
  this.min = 0
  this.max = 0
  this.sumOfSquares = 0
  this.callCount = 0
}

/**
 * Update the summary statistics with a new value.
 *
 * @param {Number} totalTime Time, in seconds, of the measurement.
 * @param {Number} exclusiveTime Time that was taken by only the
 *                               current measurement (optional).
 */
Stats.prototype.recordValue = function recordValue(totalTime, exclusiveTime) {
  // Even if a caller messes up, don't break everything else: coerce a
  // non-numeric total to 0 and default exclusive time to the total.
  if (totalTime !== 0 && !totalTime) totalTime = 0
  if (exclusiveTime !== 0 && !exclusiveTime) exclusiveTime = totalTime

  // min is only meaningful once at least one call has been recorded.
  this.min = this.callCount > 0 ? Math.min(totalTime, this.min) : totalTime
  this.max = Math.max(totalTime, this.max)

  this.sumOfSquares += totalTime * totalTime
  this.callCount += 1
  this.total += totalTime
  this.totalExclusive += exclusiveTime
}

/**
 * Until the collector accepts statistics in milliseconds, this code is going
 * to have some hinky floating-point values to deal with.
 */
Stats.prototype.recordValueInMillis = recordValueInMillis
function recordValueInMillis(totalTime, exclusiveTime) {
  this.recordValue(
    totalTime * FROM_MILLIS,
    exclusiveTime >= 0 ? exclusiveTime * FROM_MILLIS : null
  )
}

/**
 * Records a byte measurement, converted to megabytes. The exclusive size
 * defaults to the total size when absent (or zero).
 */
Stats.prototype.recordValueInBytes =
  function recordValueInBytes(bytes, exclusiveBytes) {
    exclusiveBytes = exclusiveBytes || bytes
    this.recordValue(bytes / BYTES_PER_MB, exclusiveBytes / BYTES_PER_MB)
  }

/**
 * Bumps the call count without recording a timing.
 *
 * @param {Number} [count] Number of calls to add; defaults to 1.
 */
Stats.prototype.incrementCallCount = function incrementCallCount(count) {
  if (typeof count === 'undefined') count = 1
  this.callCount += count
}

/**
 * Fold another summary's statistics into this one.
 */
Stats.prototype.merge = function merge(other) {
  // Normalize summaries that use the legacy `count` field name or omit
  // exclusive time entirely.
  if (other.count && !other.callCount) {
    other.callCount = other.count
  }
  if (other.totalExclusive == null) {
    other.totalExclusive = other.total
  }

  if (other.callCount > 0) {
    this.min = this.callCount > 0 ? Math.min(this.min, other.min) : other.min
  }
  this.max = Math.max(this.max, other.max)

  this.total += other.total
  this.totalExclusive += other.totalExclusive
  this.sumOfSquares += other.sumOfSquares
  this.callCount += other.callCount
}

/**
 * The serializer relies upon this representation, so don't change the
 * values, cardinality, or ordering of this array without ensuring that
 * it matches the version of the "protocol" being sent to the collector.
 *
 * @returns {Array} Number of calls,
 *                  total time in seconds,
 *                  time for this metric alone in seconds,
 *                  shortest individual time in seconds,
 *                  longest individual time in seconds,
 *                  running sum of squares.
 */
Stats.prototype.toJSON = function toJSON() {
  return [
    this.callCount,
    this.total,
    this.totalExclusive,
    this.min,
    this.max,
    this.sumOfSquares
  ]
}

module.exports = Stats
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| name-state.js | 26.32% | (15 / 57) | 0% | (0 / 34) | 0% | (0 / 13) | 26.79% | (15 / 56) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../logger.js').child({component: 'name-state'})
var arrayUtil = require('../util/arrays')
/**
* Manages transaction names using a stack of paths.
*
* @constructor
*/
function NameState(prefix, verb, delimiter, path) {
  // Construction is just a full rename: setName owns all field
  // initialization (prefix, verb, delimiter, pathStack).
  this.setName(prefix, verb, delimiter, path)
}
/**
 * Resets the full transaction name in one shot: metric prefix, HTTP verb,
 * delimiter, and the path stack (seeded with `path` when one is given).
 */
NameState.prototype.setName = function setName(prefix, verb, delimiter, path) {
  this.setPrefix(prefix)
  this.verb = verb
  this.delimiter = delimiter
  this.pathStack = path ? [path] : []
  logger.trace('setName called on name state, path stack now %j', this.pathStack)
}
/**
 * Sets the metric prefix (e.g. 'Expressjs'), trimming a single trailing
 * slash when present. A null prefix clears it.
 */
NameState.prototype.setPrefix = function setPrefix(prefix) {
  if (prefix === null) {
    this.prefix = null
    return
  }
  var last = prefix.length - 1
  this.prefix = prefix[last] === '/' ? prefix.substring(0, last) : prefix
}
/**
 * Sets the HTTP verb (i.e. GET/POST/PUT) used when composing the
 * transaction name.
 */
NameState.prototype.setVerb = function setVerb(verb) {
  this.verb = verb
}
/**
 * Sets the delimiter string inserted between the HTTP verb and the path
 * when the transaction name is composed.
 */
NameState.prototype.setDelimiter = function setDelimiter(delimiter) {
  this.delimiter = delimiter
}
/**
 * Pushes a new element onto the naming path stack. RegExp paths are stored
 * by their source pattern; everything else is coerced to a string. Falsy
 * paths are ignored.
 */
NameState.prototype.appendPath = function appendPath(path) {
  if (!path) return

  var strPath = path instanceof RegExp ? path.source : String(path)
  this.pathStack.push(strPath)
  logger.trace('Appended %s to path stack', strPath)
}
/**
 * Pushes a new path element onto the naming stack, but only when the stack
 * is currently empty.
 *
 * Delegates to appendPath so the RegExp/string normalization and trace
 * logging live in a single place (appendPath also ignores falsy paths,
 * preserving the original behavior).
 */
NameState.prototype.appendPathIfEmpty = function appendPathIfEmpty(path) {
  if (this.pathStack.length === 0) {
    this.appendPath(path)
  }
}
/**
 * Pops the last element off the name stack.
 *
 * If `path` is provided, the stack is popped back to (and including) the
 * last element strictly equal to `path`. If no element matches, the stack
 * is left unchanged.
 *
 * @param {string} [path] - Optional. A path piece to pop back to.
 */
NameState.prototype.popPath = function popPath(path) {
  if (this.pathStack.length === 0) {
    return
  }

  if (!path) {
    this.pathStack.pop()
    return
  }

  // Array#lastIndexOf performs the same strict-equality, last-match search
  // that the arrayUtil.findLastIndex helper did, without the callback.
  var idx = this.pathStack.lastIndexOf(path)
  if (idx !== -1) {
    this.pathStack.splice(idx)
  }
}
/**
 * Builds the current transaction name from the path stack, or returns null
 * when no path has been set yet.
 */
NameState.prototype.getName = function getName() {
  // nameState initialized but never set
  if (this.pathStack.length === 0) return null

  // Join the stack and squash runs of slashes, then ensure a single leading
  // slash so the path looks like /one/two/three.
  var path = this.pathStack.join('/').replace(/[/]{2,}/g, '/')
  if (path && path[0] !== '/') {
    path = '/' + path
  }
  return _getName(this, path)
}
/**
 * Returns the transaction name used when no route matched the request,
 * composed from the current prefix/verb/delimiter and '(not found)'.
 */
NameState.prototype.getNameNotFound = function getNameNotFound() {
  return _getName(this, '(not found)')
}
/**
 * Clears the prefix, verb, delimiter, and path stack back to their unset
 * state.
 */
NameState.prototype.reset = function reset() {
  logger.trace('Reset called on name state, path stack was %j', this.pathStack)
  this.prefix = null
  this.verb = null
  this.delimiter = null
  this.pathStack = []
}
/**
 * Composes `<prefix></verb><delimiter><path>`, treating an unset prefix,
 * verb, or delimiter as the empty string.
 *
 * @param {NameState} nameState - State holding prefix/verb/delimiter.
 * @param {string} path - Normalized path portion of the name.
 */
function _getName(nameState, path) {
  var pieces = [
    nameState.prefix || '',
    nameState.verb ? '/' + nameState.verb : '',
    nameState.delimiter || '',
    path
  ]
  return pieces.join('')
}
module.exports = NameState
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| aggregator.js | 11.11% | (10 / 90) | 0% | (0 / 62) | 0% | (0 / 8) | 11.9% | (10 / 84) | |
| segment.js | 20.12% | (33 / 164) | 0% | (0 / 66) | 0% | (0 / 21) | 21.02% | (33 / 157) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 | 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../../logger').child({component: 'trace-aggregator'})
/*
*
* CONSTANTS
*
*/
var TO_MILLIS = 1e3
/**
 * Locus for the complicated logic surrounding the selection of slow
 * transaction traces for submission to the collector.
 *
 * @param {object} config Dictionary containing transaction tracing
 *                        parameters. Required.
 */
function TraceAggregator(config) {
  if (!config) throw new Error("Trace aggregator needs configuration at creation.")

  /*eslint-disable */
  /*
   * From
   *
   * https://newrelic.atlassian.net/wiki/display/eng/Transaction+Trace+Collection+Improvements
   *
   * 5 Transaction Trace Guarantee
   *
   * For the initial experience problem, the Agent will sample up to 1
   * transaction per minute until it has sampled 5 transactions. This
   * guarantees that the agent will always report some transaction traces.
   * There is no time out for this sampling period - the agent always
   * samples until it has collected 5 transactions. The agent doesn't
   * simply report the first 5 transactions that it sees because it's
   * likely (particularly for a local dev test) that all 5 transactions
   * would be associated with one request (a single web page and its
   * resources).
   */
  /*eslint-enable */
  this.reported = 0
  this.config = config

  // Top N capacity defaults to 1 unless configured (a configured 0 also
  // falls back to 1, matching the original if-guard).
  this.capacity =
    (config.transaction_tracer && config.transaction_tracer.top_n) || 1

  // hidden class optimization
  this.trace = null
  this.syntheticsTraces = []
  this.requestTimes = {}
  this.noTraceSubmitted = 0
}
/**
 * For every five harvest cycles (or "minutes"), if no new slow transactions
 * have been added, reset the requestTime match and allow a new set of five
 * to start populating the Top N Slow Trace list.
 */
TraceAggregator.prototype.resetTimingTracker = function resetTT() {
  // Drop the per-name best durations and the consecutive-empty-harvest
  // counter together.
  this.requestTimes = {}
  this.noTraceSubmitted = 0
}
/**
 * Considers a finished transaction's trace for collection: synthetics
 * traces are buffered (up to 20), and everything else competes for the
 * single slow-trace slot via isBetter().
 *
 * @param {Transaction} transaction The transaction, which we need to check
 *                                  apdexT, as well as getting the trace.
 */
TraceAggregator.prototype.add = function add(transaction) {
  var tracerConfig = this.config.transaction_tracer
  var enabled = this.config.collect_traces && tracerConfig && tracerConfig.enabled
  if (!enabled || !transaction || !transaction.metrics) return

  var trace = transaction.trace
  var name = transaction.name
  var duration = trace.getDurationInMillis()

  if (transaction.syntheticsData && this.syntheticsTraces.length < 20) {
    this.syntheticsTraces.push(trace)
  } else if (this.isBetter(name, duration, transaction.metrics.apdexT)) {
    this.trace = trace

    // because of the "first 5" rule, this may or may not be the slowest
    if (!this.requestTimes[name] || this.requestTimes[name] < duration) {
      this.requestTimes[name] = duration
    }
  }

  this.config.measureInternal('Transaction/Count', duration)
}
/**
 * If there are traces to be sent, encode them and pass them along to the
 * callback, otherwise update the relevant trace diversity settings.
 *
 * @param {Function} callback The receiver of the encoded traces or errors.
 */
TraceAggregator.prototype.harvest = function harvest(callback) {
  var tracesToAggregate = 0
  var encodedTraces = []
  var errored = false
  var normalTrace = null

  // Synthetics
  for (var i = 0, len = this.syntheticsTraces.length; i < len; ++i) {
    this.syntheticsTraces[i].generateJSON(resultAggregator)
    tracesToAggregate++
  }

  if (this.trace) {
    var max = this.trace.transaction.agent.config.max_trace_segments
    if (this.trace.segmentsSeen > max) {
      logger.warn(
        'transaction %s contained %d segments, only collecting the first %d',
        this.trace.transaction.name,
        this.trace.segmentsSeen,
        max
      )
    }
    normalTrace = this.trace
    this.noTraceSubmitted = 0
    this.trace.generateJSON(resultAggregator)
    tracesToAggregate++
  } else {
    this.noTraceSubmitted++
    if (this.noTraceSubmitted >= 5) this.resetTimingTracker()
  }

  if (tracesToAggregate === 0) {
    process.nextTick(function cb_nextTick() {
      callback(null, null, null)
    })
  }

  function resultAggregator(err, encoded) {
    // An earlier encoding already failed; never invoke the callback twice.
    if (errored) {
      return
    }

    if (err) {
      errored = true
      // BUG FIX: previously execution fell through after reporting the
      // error, pushing an undefined entry into encodedTraces and possibly
      // invoking `callback` a second time with a bogus success result.
      return callback(err)
    }

    encodedTraces.push(encoded)
    if (encodedTraces.length === tracesToAggregate) {
      callback(null, encodedTraces, normalTrace)
    }
  }
}
/**
 * Reset the trace diversity settings after a successful harvest.
 *
 * @param {Trace} trace Because the harvest cycle can take a while,
 *                      it's possible a better trace came along
 *                      in the window between the start and end of
 *                      the harvest cycle, so don't throw that away.
 */
TraceAggregator.prototype.reset = function reset(trace) {
  this.reported++
  // Only clear the slow-trace slot when a better trace hasn't replaced it
  // while the harvest was in flight.
  if (trace === this.trace) this.trace = null
  this.syntheticsTraces = []
}
/*eslint-disable */
/**
 * Determine whether a new trace is more worth keeping than an old one.
 * This gets called on every single transactionFinished event, so return as
 * quickly as possible and call as few external functions as possible. On the
 * converse, there's some complicated logic here, so spell things out.
 *
 * All specifications are from
 * https://newrelic.atlassian.net/wiki/display/eng/Transaction+Trace+Collection+Improvements
 *
 * @param {string} name Name of this transaction's key metric.
 * @param {number} duration Time the transaction took, in milliseconds.
 * @param {number} apdexT Apdex tolerating threshold, in seconds.
 */
/*eslint-enable */
TraceAggregator.prototype.isBetter = function isBetter(name, duration, apdexT) {
  var tracerConfig = this.config.transaction_tracer

  /* 1. If the transaction duration is below the tracing threshold, the
   *    transaction is skipped. The threshold for slow traces defaults to
   *    apdex_f, which is 4 * apdex_t.
   */
  var thresholdMillis
  if (tracerConfig &&
      tracerConfig.transaction_threshold &&
      tracerConfig.transaction_threshold !== 'apdex_f' &&
      typeof tracerConfig.transaction_threshold === 'number') {
    thresholdMillis = tracerConfig.transaction_threshold * TO_MILLIS
  } else {
    thresholdMillis = 4 * TO_MILLIS * apdexT
  }
  if (duration <= thresholdMillis) return false

  /* 2. If the transaction duration is less than the duration of the current
   *    slow transaction, the transaction is skipped.
   */
  if (this.trace && this.trace.getDurationInMillis() >= duration) return false

  /* We always gather some slow transactions at the start, regardless of
   * the size of Top N. This changes the behavior of the rest of the
   * decision-making process in some subtle ways.
   */
  var hasMetGuarantee = this.reported >= 5

  /* 3. If the transaction's name is in the transaction map and its duration
   *    is less than the response time in the map, it is skipped.
   */
  if (hasMetGuarantee &&
      this.requestTimes[name] &&
      this.requestTimes[name] >= duration) {
    return false
  }

  /* Not part of enumerated rules, but necessary for Top N support:
   * Ensure this name is either already in the request time map
   * or that the map still hasn't hit capacity.
   */
  if (hasMetGuarantee &&
      !this.requestTimes[name] &&
      Object.keys(this.requestTimes).length >= this.capacity) {
    return false
  }

  /* 4. The transaction is held as the slowest transaction.
   */
  return true
}
module.exports = TraceAggregator
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var logger = require('../../logger').child({component: 'segment'})
var util = require('util')
var urltils = require('../../util/urltils.js')
var sumChildren = require('../../util/sum-children')
var Timer = require('../../timer')
var INSTANCE_UNKNOWN = 'unknown'
var STATE = {
EXTERNAL: 'EXTERNAL',
CALLBACK: 'CALLBACK'
}
/**
 * Initializes the segment and binds the recorder to itself, if provided.
 *
 * @constructor
 * @classdesc
 * TraceSegments are inserted to track instrumented function calls. Each one is
 * bound to a transaction, given a name (used only internally to the framework
 * for now), and has one or more children (that are also part of the same
 * transaction), as well as an associated timer.
 *
 * @param {Transaction} transaction
 *  The transaction to which this segment will be bound.
 *
 * @param {string} name
 *  Human-readable name for this segment (e.g. 'http', 'net', 'express',
 *  'mysql', etc).
 *
 * @param {?function} recorder
 *  Callback that takes a segment and a scope name as parameters (intended to be
 *  used to record metrics related to the segment).
 *
 * @throws {Error} When `transaction` or `name` is missing.
 */
function TraceSegment(transaction, name, recorder) {
  if (!transaction) throw new Error('All segment must be associated with a transaction.')
  if (!name) throw new Error('All segment must be named')
  this.name = name
  this.transaction = transaction
  // Book-keeping counters on the transaction and agent (read elsewhere for
  // harvest statistics; never read back by this class).
  transaction.numSegments++
  transaction.agent.totalActiveSegments++
  transaction.agent.segmentsCreatedInHarvest++
  if (recorder) {
    // Pre-bind this segment as the recorder's first argument so the
    // transaction can later invoke it with just a scope name.
    transaction.addRecorder(recorder.bind(null, this))
  }
  this.parameters = {nr_exclusive_duration_millis: null}
  this.children = []
  this.timer = new Timer()
  // hidden class optimization: initialize every property the segment may ever
  // hold up front so all instances share one hidden class.
  this.partialName = null
  this._exclusiveDuration = null
  this._collect = true
  this.host = null
  this.port = null
  this.state = STATE.EXTERNAL
  this.async = true
  this.ignore = false
  this.probe('new TraceSegment')
}
/**
 * Records datastore instance information as parameters on this segment,
 * honoring the agent's datastore_tracer configuration flags.
 *
 * @param {string} host
 *  The name of the host of the database. Normalized when it represents
 *  localhost.
 *
 * @param {string|number} port
 *  The database's port, path to unix socket, or id.
 *
 * @param {string|number|bool} database
 *  The name or ID of the database that was connected to. Or `false` if there is
 *  no database name (i.e. Redis has no databases, only hosts).
 */
TraceSegment.prototype.captureDBInstanceAttributes =
function captureDBInstanceAttributes(host, port, database) {
  var agentConfig = this.transaction.agent.config
  var dsConfig = agentConfig.datastore_tracer
  // Record the database name only when one was given and reporting is on.
  if (database !== false && dsConfig.database_name_reporting.enabled) {
    var dbName
    if (typeof database === 'number') {
      dbName = database
    } else {
      dbName = database || INSTANCE_UNKNOWN
    }
    this.parameters.database_name = dbName
  }
  // Record host/port information when instance reporting is on.
  if (dsConfig.instance_reporting.enabled) {
    var resolvedPort = port || INSTANCE_UNKNOWN
    var resolvedHost = host
    // Replace localhost aliases with the real host name.
    if (resolvedHost && urltils.isLocalhost(resolvedHost)) {
      resolvedHost = agentConfig.getHostnameSafe(resolvedHost)
    }
    // 'UNKNOWN_BOX' is config's default name of a host.
    if (!resolvedHost || resolvedHost === 'UNKNOWN_BOX') {
      resolvedHost = INSTANCE_UNKNOWN
    }
    this.parameters.host = resolvedHost
    this.parameters.port_path_or_id = String(resolvedPort)
  }
}
/**
 * Flags this segment as having moved from its external-call phase into the
 * user-callback phase (see STATE above).
 */
TraceSegment.prototype.moveToCallbackState = function moveToCallbackState() {
  this.state = STATE.CALLBACK
}
/**
 * @return {boolean} True when the segment is currently in the callback phase.
 */
TraceSegment.prototype.isInCallbackState = function isInCallbackState() {
  return this.state === STATE.CALLBACK
}
/**
 * Records a named debugging probe on the transaction — a no-op unless the
 * transaction is collecting trace stacks.
 *
 * @param {string} action Human-readable label for the probe.
 */
TraceSegment.prototype.probe = function probe(action) {
  if (this.transaction.traceStacks) {
    this.transaction.probe(action, {segment: this.name})
  }
}
/**
 * Once a transaction is named, the web segment also needs to be updated to
 * match it (so this must be called after transaction.setName). The
 * transaction's partial name is copied as well, since it is needed to name
 * apdex metrics during metric recording. Finally, marking the segment as web
 * merges the segment's parameters (plus the raw URL's query parameters) onto
 * both the segment and the transaction trace.
 *
 * @param {string} rawURL The URL, as it came in, for parameter extraction.
 */
TraceSegment.prototype.markAsWeb = function markAsWeb(rawURL) {
  var tx = this.transaction
  // transaction name and web segment name must match
  this.name = tx.name
  // partialName is used to name apdex metrics when recording
  this.partialName = tx._partialName
  var config = tx.agent.config
  // Work on a copy so the parameters can be adjusted before being applied to
  // multiple destinations. They eventually run through copyParameters, so
  // `ignored_params` / `capture_params` handling is not a concern here.
  var mergedParams = util._extend({}, this.parameters)
  // Segment-only datum; it must not be moved onto the trace.
  delete mergedParams.nr_exclusive_duration_millis
  // We are assured we have the URL here, so fold in its query parameters.
  // copyParameters keeps keys already present in mergedParams.
  urltils.copyParameters(config, urltils.parseParameters(rawURL), mergedParams)
  urltils.copyParameters(config, mergedParams, this.parameters)
  urltils.copyParameters(config, mergedParams, tx.trace.parameters)
}
/**
 * A segment attached to something evented (such as a database
 * cursor) just finished an action, so set the timer to mark
 * the timer as having a stop time.
 */
TraceSegment.prototype.touch = function touch() {
  this.probe('Touched')
  this.timer.touch()
  // The root segment's duration may need to stretch to cover this touch.
  this._updateRootTimer()
}
// Force the timer to an explicit duration (and optionally start time),
// delegating to the underlying Timer.
TraceSegment.prototype.overwriteDurationInMillis = overwriteDurationInMillis
function overwriteDurationInMillis(duration, start) {
  this.timer.overwriteDurationInMillis(duration, start)
}
// Begin timing this segment's action.
TraceSegment.prototype.start = function start() {
  this.timer.begin()
}
/**
 * Stop timing the related action.
 */
TraceSegment.prototype.end = function end() {
  // Ending an already-stopped timer would corrupt the recorded duration.
  if (!this.timer.isActive()) return
  this.probe('Ended')
  this.timer.end()
  this._updateRootTimer()
}
/**
 * Helper to extend the root segment's timer when this segment ends later than
 * the root currently does; otherwise a no-op.
 */
TraceSegment.prototype._updateRootTimer = function _updateRootTimer() {
  var rootSegment = this.transaction.trace.root
  if (!this.timer.endsAfter(rootSegment.timer)) return
  // This segment's end expressed as a duration relative to the root's start.
  var extendedDuration =
    this.timer.start - rootSegment.timer.start + this.getDurationInMillis()
  rootSegment.overwriteDurationInMillis(extendedDuration)
}
/**
 * Test to see if underlying timer is still active
 *
 * @returns {boolean} true if no longer active, else false.
 */
TraceSegment.prototype._isEnded = function _isEnded() {
  // A touched timer may still report as active, but the segment has already
  // recorded a stop time, so treat "touched" as ended too.
  return !this.timer.isActive() || this.timer.touched
}
/**
 * Add a new segment to a scope implicitly bounded by this segment.
 *
 * @param {string} childName New human-readable name for the segment.
 * @param {?function} recorder Optional metrics recorder for the child.
 * @returns {TraceSegment} New nested TraceSegment.
 */
TraceSegment.prototype.add = function add(childName, recorder) {
  logger.trace('Adding segment %s to %s', childName, this.name)
  var child = new TraceSegment(this.transaction, childName, recorder)
  var config = this.transaction.agent.config
  // Once the transaction has seen its quota of segments, further children are
  // still created but flagged so they won't be collected into the trace.
  if (this.transaction.trace.segmentsSeen++ >= config.max_trace_segments) {
    child._collect = false
  }
  this.children.push(child)
  // Back-pointer is only maintained in debug mode.
  if (config.debug && config.debug.double_linked_transactions) {
    child.parent = this
  }
  return child
}
/**
 * Set the duration of the segment explicitly.
 *
 * @param {Number} duration Duration in milliseconds.
 * @param {Number} [start] Optional explicit start time, passed through to the
 *                         underlying Timer.
 */
TraceSegment.prototype.setDurationInMillis = setDurationInMillis
function setDurationInMillis(duration, start) {
  this.timer.setDurationInMillis(duration, start)
}
// @return {Number} This segment's measured duration, in milliseconds.
TraceSegment.prototype.getDurationInMillis = function getDurationInMillis() {
  return this.timer.getDurationInMillis()
}
/**
 * Only for testing!
 *
 * @param {number} duration Milliseconds of exclusive duration.
 */
TraceSegment.prototype._setExclusiveDurationInMillis = _setExclusiveDurationInMillis
function _setExclusiveDurationInMillis(duration) {
  this._exclusiveDuration = duration
}
/**
 * The duration of the transaction trace tree that only this level accounts
 * for.
 *
 * @return {integer} The amount of time the trace took, minus any child
 *         segments, in milliseconds.
 */
TraceSegment.prototype.getExclusiveDurationInMillis = getExclusiveDurationInMillis
function getExclusiveDurationInMillis() {
  // Use a null check (not truthiness) so an explicitly-set exclusive duration
  // of 0 is honored instead of being silently recomputed.
  if (this._exclusiveDuration != null) return this._exclusiveDuration
  var total = this.getDurationInMillis()
  var end = this.timer.toRange()[1]
  if (this.children.length > 0) {
    // convert the list of start, duration pairs to start, end pairs
    total -= sumChildren(this._getChildPairs(end), end)
  }
  return total
}
/**
 * @return {Array.<TraceSegment>} New array of the non-ignored children.
 */
TraceSegment.prototype.getChildren = function getChildren() {
  return this.children.filter(function notIgnored(child) {
    return !child.ignore
  })
}
/**
 * @return {Array.<TraceSegment>} New array of the children that are both
 *  collectible (under the segment limit) and not ignored.
 */
TraceSegment.prototype.getCollectedChildren = function getCollectedChildren() {
  return this.children.filter(function collectible(child) {
    return child._collect && !child.ignore
  })
}
/**
 * Enumerate the timings of this segment's descendants.
 *
 * @param {Number} end The end of this segment, to keep the calculated
 *                     duration from exceeding the duration of the
 *                     parent. Defaults to Infinity.
 *
 * @returns {Array} Unsorted list of [start, end] pairs, with no pair
 *                  having an end greater than the passed in end time.
 */
TraceSegment.prototype._getChildPairs = function _getChildPairs(end) {
  // quick optimization
  if (this.children.length < 1) return []
  if (!end) end = Infinity
  // Iterative traversal of the whole subtree: pop a segment, enqueue its
  // children, and collect one [start, end] pair per descendant.
  var children = this.getChildren()
  var childPairs = []
  while (children.length) {
    var child = children.pop()
    var pair = child.timer.toRange()
    // A child starting at or after the parent's end contributes nothing.
    if (pair[0] >= end) continue
    children = children.concat(child.getChildren())
    // Clamp the child's end so it never exceeds the parent's end.
    pair[1] = Math.min(pair[1], end)
    childPairs.push(pair)
  }
  return childPairs
}
/**
 * This is perhaps the most poorly-documented element of transaction traces:
 * what do each of the segment representations look like prior to encoding?
 * Spelunking in the code for the other agents has revealed that each child
 * node is an array with the following field in the following order:
 *
 * 0: entry timestamp relative to transaction start time
 * 1: exit timestamp
 * 2: metric name
 * 3: parameters as a name -> value JSON dictionary
 * 4: any child segments
 *
 * Other agents include further fields in this. I haven't gotten to the bottom
 * of all of them (and Ruby, of course, sends marshalled Ruby object), but
 * here's what I know so far:
 *
 * in Java:
 * 5: class name
 * 6: method name
 *
 * in Python:
 * 5: a "label"
 *
 * FIXME: I don't know if it makes sense to add custom fields for Node. TBD
 */
TraceSegment.prototype.toJSON = function toJSON() {
  // use depth-first search on the segment tree using stack
  var segmentsToProcess = [this]
  // used to keep track of the last parent to add child JSONs to, it will hold
  // pairs of the parent serialized segment and number of children it is expecting
  // to have added
  var parentStack = []
  var resultTreeJson = null
  while (segmentsToProcess.length !== 0) {
    var segment = segmentsToProcess.pop()
    // Lazily compute and cache the exclusive duration on the parameters.
    if (!segment.parameters.nr_exclusive_duration_millis) {
      segment.parameters.nr_exclusive_duration_millis =
        segment.getExclusiveDurationInMillis()
    }
    var start = segment.timer.startedRelativeTo(segment.transaction.trace.root.timer)
    var duration = segment.getDurationInMillis()
    var segmentChildren = segment.getCollectedChildren()
    // Serialized shape documented in the comment block above.
    var serializedSegment = [
      start,
      start + duration,
      segment.name,
      segment.parameters,
      new Array(segmentChildren.length)
    ]
    // The first segment processed is this one, the root of the result tree.
    if (resultTreeJson === null) {
      resultTreeJson = serializedSegment
    }
    if (parentStack.length !== 0) {
      // get last visited parent
      var parent = parentStack[parentStack.length - 1]
      var parentChildren = parent[0][4]
      // Children are popped off the stack in reverse order, so the child slots
      // are filled from the highest index down.
      var childIndex = --parent[1]
      parentChildren[childIndex] = serializedSegment
      // if the parent received all its children data, remove the parent from the stack
      if (childIndex === 0) {
        parentStack.pop()
      }
    }
    if (segmentChildren.length) {
      parentStack.push([serializedSegment, segmentChildren.length])
      segmentsToProcess = segmentsToProcess.concat(segmentChildren)
    }
  }
  return resultTreeJson
}
module.exports = TraceSegment
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| outbound.js | 17.5% | (14 / 80) | 0% | (0 / 38) | 0% | (0 / 10) | 18.18% | (14 / 77) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var events = require('events')
var recordExternal = require('../../../metrics/recorders/http_external')
var NAMES = require('../../../metrics/names')
var urltils = require('../../../util/urltils')
var hashes = require('../../../util/hashes')
var logger = require('../../../logger').child({component: 'outbound'})
var shimmer = require('../../../shimmer')
var DEFAULT_PORT = 80
/**
 * Instruments an outbound HTTP request: creates an external segment named for
 * the destination host, runs `makeRequest` inside it, and wraps the request
 * and response emitters to capture errors, CAT headers, and timing.
 *
 * @param {Agent} agent The agent whose tracer owns the current transaction.
 * @param {string} hostname Destination host (required).
 * @param {number} port Destination port (required; appended to the name when
 *                      it is not the default HTTP port).
 * @param {Function} makeRequest Thunk that performs the real request and
 *                               returns the request object.
 * @throws {Error} When hostname or port is missing/invalid.
 */
module.exports = function instrumentOutbound(agent, hostname, port, makeRequest) {
  if (!hostname) throw new Error('hostname must be defined!')
  if (!port || port < 1) throw new Error('port must be defined!')
  // Non-default ports become part of the host name (and thus the metric name).
  if (port && port !== DEFAULT_PORT) hostname = hostname + ':' + port
  var transaction = agent.tracer.getTransaction()
  var name = NAMES.EXTERNAL.PREFIX + hostname
  return agent.tracer.addSegment(
    name,
    recordExternal(hostname, 'http'),
    null,
    false,
    instrumentRequest
  )
  // Starts the segment timer, issues the real request, and wraps its emitter
  // so 'error' and 'response' events are observed by the agent.
  function instrumentRequest(segment) {
    segment.start()
    var request = makeRequest()
    // Separate the query string from the path; record query params separately.
    var parsed = urltils.scrubAndParseParameters(request.path)
    segment.name += parsed.path
    urltils.copyParameters(agent.config, parsed.parameters, segment.parameters)
    // Wrap the emit method. We're doing a special wrapper instead of using
    // `tracer.bindEmitter` because we want to do some logic based on certain
    // events.
    shimmer.wrapMethod(request, 'request.emit', 'emit', function wrapEmit(emit) {
      var boundEmit = agent.tracer.bindFunction(emit, segment)
      return function wrappedRequestEmit(evnt, arg) {
        if (evnt === 'error') {
          segment.end()
          if (handleError(request, arg)) {
            return // FIXME In v2 we should always call emit.
          }
        } else if (evnt === 'response') {
          handleResponse(segment, request, arg)
        }
        return boundEmit.apply(this, arguments)
      }
    })
    return request
  }
  // Decides whether the agent should capture the error itself. Returns true
  // when the agent swallowed it (the user had no 'error' listener).
  function handleError(req, error) {
    if (listenerCount(req, 'error') > 0) {
      logger.trace(
        error,
        'Not capturing outbound error because user has already handled it.'
      )
      return false
    }
    /* we should be calling request.emit('error', error) here. We currently
     * do not do this because the agent has historically swallowed these
     * errors, re-enabling them may cause unexpected errors to bubble up in
     * code that depends on this behavior.
     */
    logger.trace(
      error,
      'Captured outbound error on behalf of the user (normally an uncaught exception).'
    )
    agent.errors.add(transaction, error)
    return true
  }
  // Pulls CAT headers off the response (when the feature flag is on) and ends
  // the segment when the response's 'end' event fires.
  function handleResponse(segment, req, res) {
    // FLAG: cat
    if (agent.config.feature_flag.cat) {
      pullCatHeaders(
        agent.config,
        segment,
        hostname,
        res.headers['x-newrelic-app-data']
      )
    }
    // Again a custom emit wrapper because we want to watch for the `end` event.
    shimmer.wrapMethod(res, 'response.emit', 'emit', function wrapEmit(emit) {
      var boundEmit = agent.tracer.bindFunction(emit, segment)
      return function wrappedResponseEmit(evnt) {
        if (evnt === 'end') {
          segment.end()
        }
        return boundEmit.apply(this, arguments)
      }
    })
  }
}
/**
 * Parses the obfuscated CAT response header and, when it comes from a trusted
 * account, renames the segment as a cross-application transaction and records
 * the CAT identifiers on it. Bails out (with trace logging) whenever the
 * config or header data is unusable.
 *
 * @param {object} config Agent config (encoding_key, trusted_account_ids).
 * @param {TraceSegment} segment The external segment to annotate.
 * @param {string} host External host name, used in the new segment name.
 * @param {string} obfAppData Raw x-newrelic-app-data header value.
 */
function pullCatHeaders(config, segment, host, obfAppData) {
  if (!config.encoding_key) {
    logger.trace('config.encoding_key is not set - not parsing response CAT headers')
    return
  }
  if (!config.trusted_account_ids) {
    logger.trace(
      'config.trusted_account_ids is not set - not parsing response CAT headers'
    )
    return
  }
  // is our downstream request CAT-aware?
  if (!obfAppData) {
    logger.trace('Got no CAT app data in response header x-newrelic-app-data')
    return
  }
  var appData = null
  try {
    appData = JSON.parse(
      hashes.deobfuscateNameUsingKey(obfAppData, config.encoding_key)
    )
  } catch (e) {
    logger.warn('Got an unparsable CAT header x-newrelic-app-data: %s', obfAppData)
    return
  }
  // The first element must be the "account#app" identifier string.
  if (!(appData.length && typeof appData[0] === 'string')) return
  // Make sure it is a trusted account
  var accountId = parseInt(appData[0].split('#')[0], 10)
  if (config.trusted_account_ids.indexOf(accountId) === -1) {
    logger.trace('Response from untrusted CAT header account id: %s', accountId)
    return
  }
  segment.catId = appData[0]
  segment.catTransaction = appData[1]
  segment.name = NAMES.EXTERNAL.TRANSACTION + host + '/' +
    segment.catId + '/' + segment.catTransaction
  if (appData.length >= 6) {
    segment.parameters.transaction_guid = appData[5]
  }
  logger.trace('Got inbound response CAT headers in transaction %s',
    segment.transaction.id)
}
/**
 * Counts the listeners registered for an event, using the static
 * EventEmitter.listenerCount helper when the running Node provides it and
 * falling back to the instance's listeners array otherwise.
 *
 * @param {EventEmitter} emitter The emitter to inspect.
 * @param {string} evnt The event name.
 * @return {number} How many listeners are registered for `evnt`.
 */
function listenerCount(emitter, evnt) {
  if (typeof events.EventEmitter.listenerCount === 'function') {
    return events.EventEmitter.listenerCount(emitter, evnt)
  }
  return emitter.listeners(evnt).length
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| arrays.js | 36.36% | (8 / 22) | 0% | (0 / 8) | 0% | (0 / 4) | 36.36% | (8 / 22) | |
| cat.js | 26.67% | (8 / 30) | 0% | (0 / 16) | 0% | (0 / 3) | 26.67% | (8 / 30) | |
| codec.js | 20% | (3 / 15) | 0% | (0 / 4) | 0% | (0 / 4) | 23.08% | (3 / 13) | |
| copy.js | 28.57% | (2 / 7) | 0% | (0 / 4) | 0% | (0 / 1) | 28.57% | (2 / 7) | |
| deep-equal.js | 7.27% | (4 / 55) | 0% | (0 / 49) | 0% | (0 / 4) | 9.76% | (4 / 41) | |
| flatten.js | 90% | (9 / 10) | 83.33% | (5 / 6) | 100% | (1 / 1) | 88.89% | (8 / 9) | |
| hashes.js | 33.33% | (10 / 30) | 0% | (0 / 2) | 0% | (0 / 5) | 33.33% | (10 / 30) | |
| label-parser.js | 16.18% | (11 / 68) | 0% | (0 / 33) | 0% | (0 / 7) | 16.18% | (11 / 68) | |
| logger.js | 61.24% | (79 / 129) | 37.5% | (24 / 64) | 62.5% | (10 / 16) | 64.46% | (78 / 121) | |
| safe-json.js | 36.36% | (4 / 11) | 0% | (0 / 2) | 33.33% | (1 / 3) | 36.36% | (4 / 11) | |
| stream-sink.js | 30.77% | (8 / 26) | 0% | (0 / 2) | 0% | (0 / 5) | 30.77% | (8 / 26) | |
| sum-children.js | 9.09% | (2 / 22) | 0% | (0 / 10) | 0% | (0 / 2) | 10% | (2 / 20) | |
| urltils.js | 8.16% | (4 / 49) | 0% | (0 / 44) | 0% | (0 / 9) | 8.51% | (4 / 47) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 | 1 1 1 1 1 1 1 1 | 'use strict'
exports.find = arrayFind
exports.findLast = arrayFindLast
exports.findIndex = arrayFindIndex
exports.findLastIndex = arrayFindLastIndex
/**
 * Finds the first element in an array that `pred` matches.
 *
 * Remove once Node v0.10, v0.12, v1, v2, and v3 are no longer supported.
 *
 * @deprecated With Node.js v4
 *
 * @param {Array} arr - The array to search.
 * @param {Function} pred - A predicate function which returns `true` on matches.
 * @param {*} [ctx] - The `this` arg for `pred`.
 *
 * @return {*?} - The first matching item if found, otherwise `undefined`.
 */
function arrayFind(arr, pred, ctx) {
  // Scan forward and return the first matching element directly; implicitly
  // returns `undefined` when nothing matches.
  for (var i = 0; i < arr.length; ++i) {
    if (pred.call(ctx, arr[i], i, arr)) {
      return arr[i]
    }
  }
}
/**
 * Finds the last element in an array that `pred` matches.
 *
 * @param {Array} arr - The array to search.
 * @param {Function} pred - A predicate function which returns `true` on matches.
 * @param {*} [ctx] - The `this` arg for `pred`.
 *
 * @return {*?} - The last matching item if found, otherwise `undefined`.
 */
function arrayFindLast(arr, pred, ctx) {
  // Scan backward and return the first match encountered; implicitly returns
  // `undefined` when nothing matches.
  for (var i = arr.length - 1; i >= 0; --i) {
    if (pred.call(ctx, arr[i], i, arr)) {
      return arr[i]
    }
  }
}
/**
 * Finds the first index of a single element in an array matching `pred`.
 *
 * Remove once Node v0.10, v0.12, v1, v2, and v3 are no longer supported.
 *
 * @deprecated With Node.js v4
 *
 * @param {Array} arr - The array to search.
 * @param {Function} pred - A predicate function which returns `true` on matches.
 * @param {*} [ctx] - The `this` arg for `pred`.
 *
 * @return {number} - The index of the first matching item if found, otherwise `-1`.
 */
function arrayFindIndex(arr, pred, ctx) {
  var i = 0
  while (i < arr.length) {
    if (pred.call(ctx, arr[i], i, arr)) {
      return i
    }
    ++i
  }
  return -1
}
/**
 * Finds the last index of a single element in an array matching `pred`.
 *
 * @param {Array} arr - The array to search.
 * @param {Function} pred - A predicate function which returns `true` on matches.
 * @param {*} [ctx] - The `this` arg for `pred`.
 *
 * @return {number} - The index of the last matching item if found, otherwise `-1`.
 */
function arrayFindLastIndex(arr, pred, ctx) {
  var i = arr.length
  while (i-- > 0) {
    if (pred.call(ctx, arr[i], i, arr)) {
      return i
    }
  }
  return -1
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 | 1 1 1 1 1 1 1 1 | 'use strict'
var util = require('util')
var hashes = require('./hashes')
var logger = require('../logger').child({component: 'cat'})
module.exports.handleCatHeaders = handleCatHeaders
module.exports.parsedHeadersToTrans = parsedHeadersToTrans
/**
 * Deobfuscates the inbound CAT id and transaction headers (when present) and
 * applies them to the given transaction via parsedHeadersToTrans. A malformed
 * transaction header is logged and treated as absent.
 *
 * @param {?string} incomingCatId Obfuscated x-newrelic-id header value.
 * @param {?string} obfTransaction Obfuscated x-newrelic-transaction header value.
 * @param {string} encKey The agent's encoding key.
 * @param {Transaction} transaction The transaction to annotate.
 */
function handleCatHeaders(incomingCatId, obfTransaction, encKey, transaction) {
  var decodedCatId = null
  if (incomingCatId) {
    decodedCatId = hashes.deobfuscateNameUsingKey(incomingCatId, encKey)
  }
  var externalTrans = null
  if (obfTransaction) {
    try {
      externalTrans = JSON.parse(
        hashes.deobfuscateNameUsingKey(obfTransaction, encKey)
      )
    } catch (e) {
      logger.trace(
        'Got an unparsable CAT header x-newrelic-transaction: %s',
        obfTransaction
      )
    }
  }
  parsedHeadersToTrans(decodedCatId, externalTrans, transaction)
}
/**
 * Applies already-deobfuscated CAT header data to a transaction: the caller's
 * CAT id, referring transaction guid, trip id, and referring path hash.
 * Non-string trip ids / path hashes that are truthy mark the transaction's
 * incoming external data as invalid.
 *
 * @param {?string} parsedCatId Deobfuscated CAT account id.
 * @param {?Array} externalTrans Parsed x-newrelic-transaction payload.
 * @param {Transaction} transaction The transaction to annotate.
 */
function parsedHeadersToTrans(parsedCatId, externalTrans, transaction) {
  if (typeof parsedCatId === 'string') {
    transaction.incomingCatId = parsedCatId
  }
  // Array.isArray replaces the long-deprecated util.isArray (DEP0044), which
  // has been removed from modern Node; behavior is identical.
  if (!Array.isArray(externalTrans)) return
  transaction.referringTransactionGuid = externalTrans[0]
  var tripId = externalTrans[2]
  if (typeof tripId === 'string') {
    transaction.tripId = tripId
  } else if (tripId) {
    transaction.invalidIncomingExternalTransaction = true
  }
  var referringPathHash = externalTrans[3]
  if (typeof referringPathHash === 'string') {
    transaction.referringPathHash = referringPathHash
  } else if (referringPathHash) {
    transaction.invalidIncomingExternalTransaction = true
  }
}
// A referring path hash is only usable when it arrives as a string.
function _isValidReferringHash(hash) {
  var isString = typeof hash === 'string'
  return isString
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 | 1 1 1 | 'use strict'
var safeJSON = require('./safe-json')
var zlib = require('zlib')
module.exports = {
  /**
   * Take in an object literal, and deflate and then Base64 encode it.
   *
   * zlib works with streams, so this must be used asynchronously.
   *
   * @param {object} data
   *  The data to encode.
   *
   * @param {Function} callback
   *  The callback to take the results. The first parameter is any errors from
   *  encoding, and the second parameter is the encoded data object.
   */
  encode: function encode(data, callback) {
    try {
      // The try/catch routes synchronous stringification errors through the
      // callback instead of letting them escape.
      zlib.deflate(safeJSON.stringifySync(data), function cb_deflate(err, raw) {
        if (err) return callback(err)
        return callback(null, raw.toString('base64'))
      })
    } catch (err) {
      return callback(err)
    }
  },
  /**
   * Base64 decode a string, decompress it, and then turn the results back into
   * a JavaScript object.
   *
   * zlib works with streams, so this must be used asynchronously.
   *
   * @param {object} encoded
   *  The data to decode.
   *
   * @param {Function} callback
   *  The callback to take the results. The first parameter is any errors from
   *  decoding, and the second parameter is the decoded data object.
   */
  decode: function decode(encoded, callback) {
    // NOTE(review): `new Buffer(...)` is deprecated (DEP0005) in modern Node;
    // kept as-is for the old Node versions this file targets.
    zlib.inflate(new Buffer(encoded, 'base64'), function cb_inflate(err, raw) {
      if (err) return callback(err)
      try {
        // JSON.parse coerces the inflated Buffer to a string before parsing.
        return callback(null, JSON.parse(raw))
      } catch (error) {
        return callback(error)
      }
    })
  }
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 | 1 1 | 'use strict'
exports.shallow = shallowCopy
/**
 * Performs a shallow copy of all own enumerable properties on the source
 * object.
 *
 * @param {object} source - The object to copy the properties from.
 * @param {object} [dest={}] - The object to copy the properties to.
 *
 * @return {object} The destination object.
 */
function shallowCopy(source, dest) {
  var target = dest || {}
  for (var key in source) {
    // Skip inherited properties; only own keys are copied.
    if (source.hasOwnProperty(key)) {
      target[key] = source[key]
    }
  }
  return target
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 | 1 1 1 1 | 'use strict' function isArguments(object) { return Object.prototype.toString.call(object) === '[object Arguments]' } function slice(args) { // Array.prototype.slice on arguments array-like is expensive var l = args.length, a = [], i for (i = 0; i < l; i++) { a[i] = args[i] } return a } /** * This is a node-specific version of deepEquals, modeled on bits and pieces * of loads of other implementations of this algorithm, most notably the * one in the Node.js source and Underscore's. It doesn't throw and handles * cycles. * * Everybody who writes one of these functions puts the documentation * inline, which makes it incredibly hard to follow. Here's what this version * of the algorithm does, in order: * * 1. === only tests objects and and functions by reference. Null is an object. * Any pair of identical entities failing this test are therefore objects * (including null), which need a recursive compare by attribute. * 2. Since the only matching entities to get to this test must be objects, if * a or b is not an object, they're clearly not the same. All unfiltered a * and b getting are objects (including null). * 3. null is an object, but null === null. All unfiltered a and b are non-null * objects. * 4. Buffers need to be special-cased because they live partially on the wrong * side of the C++ / JavaScript barrier. Still, calling this on structures * that can contain Buffers is a bad idea, because they can contain * multiple megabytes of data and comparing them byte-by-byte is very * expensive. buffertools is a better solution here, but this version of * this code is dependency free. * 5. 
It's much faster to compare dates by numeric value than by lexical value. * 6. Same goes for Regexps. * 7. The parts of an arguments list most people care about are the arguments * themselves, not the callee, which you shouldn't be looking at anyway. * 8. Objects are more complex: * a. ensure that a and b are on the same constructor chain * b. ensure that a and b have the same number of own properties (which is * what Object.keys returns). * c. ensure that cyclical references don't blow up the stack. * d. ensure that all the key names match (faster) * e. ensure that all of the associated values match, recursively (slower) * * (SOMEWHAT UNTESTED) ASSUMPTIONS: * * o Functions are only considered identical if they unify to the same * reference. To anything else is to invite the wrath of the halting problem. * o V8 is smart enough to optimize treating an Array like any other kind of * object. * o Users of this function are cool with mutually recursive data structures * that are otherwise identical being treated as the same. 
*/ function deeper(a, b, ca, cb) { if (a === b) { return true } else if (typeof a !== 'object' || typeof b !== 'object') { return false } else if (a === null || b === null) { return false } else if (Buffer.isBuffer(a) && Buffer.isBuffer(b)) { if (a.length !== b.length) return false // potentially incredibly expensive for (var i = 0; i < a.length; i++) if (a[i] !== b[i]) return false return true } else if (a instanceof Date && b instanceof Date) { return a.getTime() === b.getTime() } else if (a instanceof RegExp && b instanceof RegExp) { return a.source === b.source && a.global === b.global && a.multiline === b.multiline && a.lastIndex === b.lastIndex && a.ignoreCase === b.ignoreCase } else if (isArguments(a) || isArguments(b)) { if (!(isArguments(a) && isArguments(b))) return false return deeper(slice(a), slice(b), ca, cb) } if (a.constructor !== b.constructor) return false var ka = Object.keys(a), kb = Object.keys(b) if (ka.length !== kb.length) return false var cal = ca.length while (cal--) if (ca[cal] === a) return cb[cal] === b ca.push(a); cb.push(b) ka.sort(); kb.sort() for (var j = ka.length - 1; j >= 0; j--) if (ka[j] !== kb[j]) return false var key for (var k = ka.length - 1; k >= 0; k--) { key = ka[k] if (!deeper(a[key], b[key], ca, cb)) return false } ca.pop(); cb.pop() return true } module.exports = function exports(a, b) { return deeper(a, b, [], []) } |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 | 1 3 3 3 6 6 4 3 | 'use strict'
/**
* Flatten nested maps of JSONifiable data.
*
* Ex: {a: 5, b: {c: true, d: 7}} -> {a: 5, 'b.c': true, 'b.d': 7}
*
* @param result Object to place key-value pairs into, normally called with {}
* @param prefix Prefix for keys, normally called with ''
* @param obj Object to be flattened
*
* @return Object with flattened key-value pairs
*/
module.exports = function flatten(result, prefix, obj, seen) {
seen = seen || []
seen.push(obj)
for (var key in obj) {
Iif (seen.indexOf(obj[key]) > -1) {
continue
}
if (obj[key] instanceof Object) flatten(result, prefix + key + '.', obj[key], seen)
else result[prefix + key] = obj[key]
}
return result
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 | 1 1 1 1 1 1 1 1 1 1 | 'use strict'
var crypto = require('crypto')
/**
 * XORs `bytes` in place against `keyBytes`, repeating the key (via modulo)
 * when it is shorter than the data. The operation is its own inverse, so the
 * same routine both obfuscates and deobfuscates.
 *
 * @param {Buffer} bytes Data to transform (mutated in place).
 * @param {Buffer} keyBytes Key material.
 * @return {Buffer} The same `bytes` Buffer, transformed.
 */
function encode(bytes, keyBytes) {
  var keyLength = keyBytes.length
  for (var idx = 0; idx < bytes.length; idx++) {
    var mixed = bytes.readUInt8(idx) ^ keyBytes.readUInt8(idx % keyLength)
    bytes.writeUInt8(mixed, idx)
  }
  return bytes
}
/**
 * Obfuscates a UTF-8 string by XORing it with a repeating key and returning
 * the result Base64-encoded (the XOR step is inlined here rather than
 * delegated to the shared encode helper).
 *
 * @param {string} name The plain-text name to obfuscate.
 * @param {string} key The obfuscation key.
 * @return {string} Base64-encoded obfuscated name.
 */
function obfuscateNameUsingKey(name, key) {
  var nameBytes = new Buffer(name, 'utf-8')
  var keyBytes = new Buffer(key)
  for (var i = 0; i < nameBytes.length; i++) {
    nameBytes.writeUInt8(
      nameBytes.readUInt8(i) ^ keyBytes.readUInt8(i % keyBytes.length),
      i
    )
  }
  return nameBytes.toString('base64')
}
/**
 * Reverses obfuscateNameUsingKey: Base64-decodes the input, XORs it with the
 * repeating key (inlined rather than delegated to the shared encode helper),
 * and returns the recovered UTF-8 string.
 *
 * @param {string} name Base64-encoded obfuscated name.
 * @param {string} key The obfuscation key.
 * @return {string} The recovered plain-text name.
 */
function deobfuscateNameUsingKey(name, key) {
  var raw = new Buffer(name, 'base64')
  var keyBytes = new Buffer(key)
  for (var i = 0; i < raw.length; i++) {
    raw.writeUInt8(raw.readUInt8(i) ^ keyBytes.readUInt8(i % keyBytes.length), i)
  }
  return raw.toString("utf-8")
}
/**
 * Computes the CAT path hash: the referring path hash rotated left one bit
 * (as an unsigned 32-bit value) XORed with the hash of "appName;pathName",
 * rendered as exactly 8 lower-case hex characters.
 *
 * @param {string} appName The application name.
 * @param {string} pathName The transaction path name.
 * @param {string|number} referingPathHash Upstream path hash (hex string or number).
 * @return {string} 8-character zero-padded hex hash.
 */
function calculatePathHash(appName, pathName, referingPathHash) {
  var referring = referingPathHash
  if (typeof referring === 'string') {
    referring = parseInt(referring, 16)
  }
  // Rotate left by one bit, keeping the result unsigned 32-bit.
  var rotated = ((referring << 1) | (referring >>> 31)) >>> 0
  var combined = (rotated ^ getHash(appName, pathName)) >>> 0
  // Left-pad with zeroes to exactly 8 hex characters regardless of magnitude.
  return ('00000000' + combined.toString(16)).substr(-8)
}
/**
 * Hashes "appName;txName" with MD5 and returns the digest's low 4 bytes read
 * in network (big-endian) byte order as an unsigned 32-bit integer.
 *
 * @param {string} appName The application name.
 * @param {string} txName The transaction name.
 * @return {number} Unsigned 32-bit hash value.
 */
function getHash(appName, txName) {
  var digest = crypto.createHash('md5')
  digest.update(new Buffer(appName + ';' + txName), 'utf8')
  var digestBuf = new Buffer(digest.digest('base64'), 'base64')
  // pull the low 4 bytes in network byte order
  return digestBuf.slice(digestBuf.length - 4, digestBuf.length).readUInt32BE(0)
}
exports.obfuscateNameUsingKey = obfuscateNameUsingKey
exports.deobfuscateNameUsingKey = deobfuscateNameUsingKey
exports.calculatePathHash = calculatePathHash
exports.getHash = getHash
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 | 1 1 1 1 1 1 1 1 1 1 1 | 'use strict'
module.exports = parse
module.exports.fromString = fromString
module.exports.fromMap = fromMap
// this creates a copy of trim that can be used with map
var trim = Function.prototype.call.bind(String.prototype.trim)
var logger = require('../logger').child({component: 'label-parser'})
var stringifySync = require('./safe-json').stringifySync
/**
 * Normalizes a labels setting (string or map form) into a list of label
 * objects, logging any warnings produced during parsing.
 *
 * @param {string|object} labels Raw labels configuration.
 * @return {Array} Parsed labels (empty when no labels were given).
 */
function parse(labels) {
  if (!labels) {
    return []
  }
  var results = typeof labels === 'string'
    ? fromString(labels)
    : fromMap(labels)
  results.warnings.forEach(function logWarnings(message) {
    logger.warn(message)
  })
  return results.labels
}
/**
 * Parses a semicolon-delimited "key:value;key:value" label string.
 *
 * @param {string} raw The raw label string.
 * @return {object} {labels: Array, warnings: Array}
 */
function fromString(raw) {
  var map = {}
  if (!raw) {
    return {labels: [], warnings: []}
  }
  var pairs = raw.split(';').map(trim)
  var parts
  // Drop empty entries produced by trailing and leading semicolons.
  while (!pairs[pairs.length - 1]) {
    pairs.pop()
  }
  while (!pairs[0]) {
    pairs.shift()
  }
  for (var i = 0, l = pairs.length; i < l; ++i) {
    parts = pairs[i].split(':').map(trim)
    if (parts.length !== 2) {
      // Fix: report the offending pair; `parts[i]` indexed the wrong array
      // (parts holds the key/value split, so parts[i] is usually undefined).
      return warn('Could not create a Label pair from ' + pairs[i])
    } else if (!parts[0]) {
      return warn('Label key can not be empty')
    } else if (!parts[1]) {
      return warn('Label value can not be empty')
    }
    map[parts[0]] = parts[1]
  }
  return fromMap(map)
  // Builds the error result for an unparseable label string.
  function warn(message) {
    return {labels: [], warnings: [
      'Invalid Label String: ' + raw,
      message
    ]}
  }
}
/**
 * Validates a {key: value} label map, truncating keys/values to 255
 * characters and capping the list at 64 labels.
 *
 * @param {object} map Label key/value pairs.
 * @return {object} {labels: Array, warnings: Array}
 */
function fromMap(map) {
  var warnings = []
  var labels = []
  Object.keys(map).forEach(function processKeys(key) {
    var type = truncate(key, 255)
    if (!map[key] || typeof map[key] !== 'string') {
      return warnings.push(
        // Fix: add the missing leading space so the warning doesn't read
        // "...for <key>should be a string...".
        'Label value for ' + type +
        ' should be a string with a length between 1 and 255 characters'
      )
    }
    var value = truncate(map[key], 255)
    if (type !== key) {
      warnings.push('Label key too long: ' + type)
    }
    if (value !== map[key]) {
      warnings.push('Label value too long: ' + value)
    }
    labels.push({label_type: type, label_value: value})
  })
  if (labels.length > 64) {
    warnings.push('Too many Labels, list truncated to 64')
    labels = labels.slice(0, 64)
  }
  if (warnings.length) {
    warnings.unshift('Partially Invalid Label Setting: ' + stringifySync(map))
  }
  return {labels: labels, warnings: warnings}
}
/**
 * Truncates a string to at most `max` characters, counting a UTF-16
 * surrogate pair as a single character so pairs are never split.
 *
 * @param {string} str String to truncate.
 * @param {number} max Maximum number of characters to keep.
 * @return {string} The (possibly truncated) string.
 */
function truncate(str, max) {
  var count = 0
  var idx = 0
  var total = str.length
  var code
  for (; idx < total; ++idx) {
    code = str.charCodeAt(idx)
    // A high surrogate means this character occupies two UTF-16 units;
    // step over the low surrogate so the pair stays intact.
    if (code >= 0xD800 && code <= 0xDBFF && idx !== total) {
      idx += 1
    }
    if (++count === max) {
      break
    }
  }
  return str.slice(0, idx + 1)
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 23 23 23 2 2 2 6 23 6 6 6 6 1 1 24 23 23 23 1 1 38 38 38 38 38 38 38 38 38 1 1 1 3 3 1 2 4 4 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 2 2 | 'use strict'
var stringifySync = require('./safe-json').stringifySync
var util = require('util')
var Readable = require('readable-stream')
var os = require('os')
module.exports = Logger
// Numeric severity for each named log level (bunyan-compatible values);
// larger numbers are more severe.
var LEVELS = {
  'trace': 10,
  'debug': 20,
  'info': 30,
  'warn': 40,
  'error': 50,
  'fatal': 60
}
util.inherits(Logger, Readable)

/**
 * A minimal readable-stream-based JSON logger.
 *
 * Fix: restores plain `if` keywords that were garbled to `Iif`/`Eif`
 * by an istanbul coverage report.
 *
 * @param {object} options Logger options: level, enabled, name,
 *   hostname, and an optional stream to pipe output into.
 * @param {object} [extra] Extra properties merged into every log entry.
 */
function Logger(options, extra) {
  if (!(this instanceof Logger)) {
    return new Logger(options, extra)
  }
  Readable.call(this)
  var passedInLevel = this.coerce(options.level)
  this.options = {
    _level: passedInLevel,
    enabled: options.enabled === undefined ? true : options.enabled
  }
  this.name = options.name
  this.hostname = options.hostname || os.hostname()
  this.extra = extra || {}
  this.buffer = ''
  this.reading = false
  if (options.stream) {
    this.pipe(options.stream)
  }
}
// Build the log(), logOnce() and logOncePer() variants for every level
// and mix them into Logger.prototype.
// (Also restores an `if` garbled to `Iif` by an istanbul coverage report.)
var loggingFunctions = {}
Object.keys(LEVELS).forEach(function buildLevel(_level) {
  // Core writer: skips disabled loggers and messages below the current
  // level; returns the stream-write result so callers can track success.
  function log(extra) {
    var level = Logger.prototype.coerce(LEVELS[_level])
    if (!this.options.enabled) return false
    if (level < this.options._level) return false
    var has_extra = typeof extra === 'object'
    var args = Array.prototype.slice.call(arguments, has_extra ? 1 : 0)
    return this.write(level, args, has_extra ? extra : null)
  }
  loggingFunctions[_level] = function checkLevel() {
    log.apply(this, arguments)
  }

  // logOnce: emit a given keyed message at most once per process.
  var seenMessages = {}
  loggingFunctions[_level + 'Once'] = function logOnce(key) {
    if (typeof key !== 'string') {
      this.debug('Attempted to key on a non-string in ' + _level + 'Once: ' + key)
      return
    }
    var level = Logger.prototype.coerce(LEVELS[_level])
    if (!this.options.enabled) return false
    if (level < this.options._level) return false
    if (seenMessages[key] !== true) {
      var args = Array.prototype.slice.call(arguments, 1)
      var writeSuccessful = log.apply(this, args)
      if (writeSuccessful) {
        seenMessages[key] = true
      }
    }
  }

  // logOncePer: emit a keyed message at most once per `interval` ms.
  var seenPerInterval = {}
  loggingFunctions[_level + 'OncePer'] = function logOncePer(key, interval) {
    if (typeof key !== 'string') {
      // Fix: this diagnostic previously said "Once" (copy-pasted from
      // logOnce); report the actual method name.
      this.debug(
        'Attempted to key on a non-string in ' + _level + 'OncePer: ' + key
      )
      return
    }
    var level = Logger.prototype.coerce(LEVELS[_level])
    if (!this.options.enabled) return false
    if (level < this.options._level) return false
    if (seenPerInterval[key] !== true) {
      var args = Array.prototype.slice.call(arguments, 2)
      var writeSuccessful = log.apply(this, args)
      if (writeSuccessful) {
        seenPerInterval[key] = true
        var clearSeen = setTimeout(function clearKey() {
          delete seenPerInterval[key]
        }, interval)
        // Don't let the expiry timer keep the process alive.
        if (clearSeen.unref !== undefined) {
          clearSeen.unref()
        }
      }
    }
  }
})
util._extend(Logger.prototype, loggingFunctions)
/**
 * Normalizes a level (numeric or name) to its numeric value, clamped to
 * [10, 60]; unknown names default to 50 (error).
 *
 * Fix: restores `if` keywords garbled to `Iif` by a coverage report.
 *
 * @param {string|number} value Level name or number.
 * @return {number} Numeric log level.
 */
Logger.prototype.coerce = function coerce(value) {
  if (!isNaN(parseInt(value, 10)) && isFinite(value)) {
    // value is numeric; clamp to the known level range
    if (value < 10) value = 10
    if (value > 60) value = 60
    return value
  }
  return LEVELS[value] || 50
}
/**
 * Creates a child logger that shares this logger's options and output
 * stream but merges `extra` into every entry it writes.
 *
 * @param {object} extra Properties added to all of the child's entries.
 * @return {object} The child logger.
 */
Logger.prototype.child = function child(extra) {
  var parent = this
  var childLogger = Object.create(loggingFunctions)
  childLogger.extra = util._extend({}, parent.extra)
  util._extend(childLogger.extra, extra)
  childLogger.options = parent.options
  // Delegate to the parent's write after layering the child's extras
  // underneath the per-call extras.
  childLogger.write = function write(level, args, extra) {
    var merged = util._extend({}, this.extra)
    util._extend(merged, getPropertiesToLog(extra))
    return parent.write(level, args, merged)
  }
  childLogger.setEnabled = Logger.prototype.setEnabled
  childLogger.child = Logger.prototype.child
  return childLogger
}
/**
 * Sets the logger's minimum level.
 *
 * @param {string|number} lvl New level (name or number).
 */
Logger.prototype.level = function level(lvl) {
  var coerced = this.coerce(lvl)
  this.options._level = coerced
}
/**
 * Enables or disables the logger; non-boolean input is ignored.
 *
 * @param {boolean} enabled Whether logging is on.
 */
Logger.prototype.setEnabled = function setEnabled(enabled) {
  if (typeof enabled !== 'boolean') return
  this.options.enabled = enabled
}
/**
 * Readable-stream pull hook: flush any buffered output; otherwise mark
 * the stream readable so future writes push directly.
 *
 * Fix: restores an `if` garbled to `Iif` by a coverage report.
 */
Logger.prototype._read = function _read() {
  if (this.buffer.length !== 0) {
    this.reading = this.push(this.buffer)
    this.buffer = ''
  } else {
    this.reading = true
  }
}
/**
 * For performance reasons we do not support %j because we will have
 * already converted the objects to strings.
 * Returns a boolean representing the status of the write
 * (success/failure)
 *
 * Fix: restores `if`/`else if` keywords garbled to `Iif`/`Eif` by a
 * coverage report.
 */
Logger.prototype.write = function write(level, args, extra) {
  // Pre-stringify object arguments so util.format never sees them raw.
  for (var i = 0, l = args.length; i < l; ++i) {
    if (typeof args[i] === 'function') {
      args[i] = args[i].valueOf()
    } else if (typeof args[i] === 'object') {
      args[i] = stringifySync(args[i])
    }
  }
  var entry = new Entry(this, level, util.format.apply(util, args))
  util._extend(entry, this.extra)
  util._extend(entry, getPropertiesToLog(extra))
  // Push directly while the stream is being read; buffer otherwise.
  if (this.reading) {
    this.reading = this.push(stringifySync(entry) + '\n')
  } else {
    this.buffer += stringifySync(entry) + '\n'
  }
  return true
}
/**
 * One JSON log record (bunyan-style fields).
 *
 * @param {Logger} logger Source logger (supplies name and hostname).
 * @param {number} level Numeric log level.
 * @param {string} msg Formatted message text.
 */
function Entry(logger, level, msg) {
  this.v = 0 // record-format version
  this.level = level
  this.name = logger.name
  this.hostname = logger.hostname
  this.pid = process.pid
  this.time = new Date().toISOString()
  this.msg = msg
}
/**
 * Copies loggable own properties from `extra`, including the
 * non-enumerable message/stack of Error instances.
 *
 * Fix: restores an `if` garbled to `Iif` by a coverage report.
 *
 * @param {object|Error} extra Extra data attached to a log call.
 * @return {object} A plain-object copy safe to merge into an entry.
 */
function getPropertiesToLog(extra) {
  var obj = util._extend({}, extra)
  // Error properties (message, stack) are not enumerable, so getting them directly
  if (extra instanceof Error) {
    var names = Object.getOwnPropertyNames(extra)
    if (names) {
      for (var i = 0; i < names.length; i++) {
        obj[names[i]] = extra[names[i]]
      }
    }
  }
  return obj
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 | 1 1 26 26 | 'use strict'
var stringifySafe = require('json-stringify-safe')
/**
 * JSON helpers that never throw: callback-style parse/stringify plus a
 * synchronous stringify with a fallback value. Stringification uses
 * json-stringify-safe, so circular references are tolerated.
 */
module.exports = {
  // Parse JSON, reporting failure through the callback as (err, null).
  parse: function parseAsync(str, cb) {
    try {
      cb(null, JSON.parse(str))
    } catch (err) {
      cb(err, null)
    }
  },
  // Stringify (cycle-safe), reporting failure as (err, '[UNPARSABLE OBJECT]').
  stringify: function stringifyAsync(obj, cb) {
    try {
      cb(null, stringifySafe(obj))
    } catch (err) {
      cb(err, '[UNPARSABLE OBJECT]')
    }
  },
  // Stringify synchronously; on failure return `returnVal` (or a
  // placeholder) instead of throwing.
  stringifySync: function stringifySync(obj, returnVal) {
    try {
      return stringifySafe(obj)
    } catch (err) {
      return returnVal || '[UNPARSABLE OBJECT]'
    }
  }
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 | 1 1 1 1 1 1 1 1 | 'use strict'
var EventEmitter = require('events').EventEmitter
var util = require('util')
/**
* Pipe a readable stream into this sink that fulfills the Writable Stream
* contract and the callback will be fired when the stream has been completely
* read.
*/
/**
 * Pipe a readable stream into this sink that fulfills the Writable Stream
 * contract and the callback will be fired when the stream has been completely
 * read.
 *
 * @param {Function} callback Invoked with (error) on failure or
 *   (null, contents) once the stream ends.
 */
function StreamSink(callback) {
  EventEmitter.call(this)
  this.callback = callback
  this.sink = ''
  this.writable = true
  var self = this
  this.on('error', function handle_error(error) {
    self.writable = false
    callback(error)
  })
}
util.inherits(StreamSink, EventEmitter)
/**
 * Appends a chunk to the in-memory sink; emits an error (and returns
 * false) if the sink has already been closed.
 *
 * @param {string|Buffer} string Chunk to append.
 * @return {boolean} Whether the write was accepted.
 */
StreamSink.prototype.write = function write(string) {
  if (this.writable) {
    // Explicitly copy buffer contents so we are sure to release references to
    // the TLS slab buffer region.
    this.sink += string.toString()
    return true
  }
  this.emit('error', new Error("Sink no longer writable!"))
  return false
}
// Finishes the stream and delivers the accumulated contents to the callback.
StreamSink.prototype.end = function end() {
  this.writable = false
  this.callback(null, this.sink)
}
// Tears the sink down without delivering the accumulated contents.
StreamSink.prototype.destroy = function destroy() {
  this.emit('close')
  this.writable = false
  delete this.sink
}
module.exports = StreamSink
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 | 1 1 | 'use strict'
/**
 * Given a list of [start, end] pairs describing a parent segment's
 * children, compute the total time covered by the children within the
 * parent: overlapping child intervals are counted only once, and any
 * child extending past the parent's end is clipped to it. The pairs are
 * sorted by start time (in place) before being merged.
 *
 * @param {Array} pairs List of [start, end] child intervals.
 * @param {number} parentEnd End time of the parent interval; children
 * starting beyond this point are ignored.
 *
 * @return {number} Total non-overlapping time covered by the children.
 */
function sumChildren(pairs, parentEnd) {
  if (pairs.length === 0) return 0
  // Sort by interval start so overlaps can be merged in one pass.
  // (Note: sorts the caller's array in place, as before.)
  pairs.sort(function cb_sort(a, b) {
    return a[0] - b[0]
  })
  var rangeStart = pairs[0][0]
  var coveredEnd = rangeStart
  var gapTotal = 0
  for (var idx = 0; idx < pairs.length; ++idx) {
    var interval = pairs[idx]
    if (interval[0] > parentEnd) break
    // Clip children that run past the end of the parent.
    var clippedEnd = interval[1] > parentEnd ? parentEnd : interval[1]
    if (interval[0] > coveredEnd) {
      // Disjoint interval: remember the gap, then start a new covered run.
      gapTotal += interval[0] - coveredEnd
      coveredEnd = clippedEnd
    } else if (clippedEnd > coveredEnd) {
      // Overlapping interval that extends the covered run.
      coveredEnd = clippedEnd
    }
  }
  return coveredEnd - rangeStart - gapTotal
}
module.exports = sumChildren
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 | 1 1 1 1 | 'use strict'
var url = require('url')
// TODO: Once Node v0.10 is deprecated, change this to use a `Set` instead.
// Hostnames and IPv4/IPv6 literals treated as synonyms for localhost.
var LOCALHOST_NAMES = {
  "localhost": true,
  "127.0.0.1": true,
  "0.0.0.0": true,
  "0:0:0:0:0:0:0:1": true,
  "::1": true,
  "0:0:0:0:0:0:0:0": true,
  "::": true
}
/**
* Utility functions for enforcing New Relic naming conditions on URLs,
* and extracting and setting parameters on traces / web trace segments.
*/
module.exports = {
/**
* Dictionary whose keys are all synonyms for localhost.
*
* @const
*/
LOCALHOST_NAMES: LOCALHOST_NAMES,
/**
* Checks if the given name is in the dictionary of localhost names.
*
* @param {string} host - The hostname to lookup.
*
* @return {bool} - True if the given hostname is a synonym for localhost.
*/
isLocalhost: function isLocahost(host) {
return LOCALHOST_NAMES.hasOwnProperty(host)
},
/**
* This was handed down from the prototype as the canonical list of status
* codes that short-circuit naming and normalization. The agent can be
* configured to mark HTTP status codes as not being errors.
*
* @param {Config} config The configuration containing the error list.
* @param {string} code The HTTP status code to check.
*
* @returns {bool} Whether the status code should be ignored.
*/
isError: function isError(config, code) {
return code >= 400 && !isIgnoredStatusCodeForErrors(config, code)
},
/**
* Returns true if the status code is an HTTP error, and it is configured to be ignored.
*
* @param {Config} config The configuration containing the error list.
* @param {string} code The HTTP status code to check.
*
* @returns {bool} Whether the status code should be ignored.
*/
isIgnoredError: function isIgnoredError(config, code) {
return code >= 400 && isIgnoredStatusCodeForErrors(config, code)
},
/**
* Get back the pieces of the URL that New Relic cares about. Apply these
* restrictions, in order:
*
* 1. Ensure that after parsing the URL, there's at least '/'
* 2. Strip off session trackers after ';' (a New Relic convention)
* 3. Remove trailing slash.
*
* @param {string} requestURL The URL fragment to be scrubbed.
* @return {string} The cleaned URL.
*/
scrub: function scrub(requestURL) {
if (typeof requestURL === 'string') {
requestURL = url.parse(requestURL)
}
var path = requestURL.pathname
if (path) {
path = path.split(';')[0]
if (path !== '/' && path.charAt(path.length - 1) === '/') {
path = path.substring(0, path.length - 1)
}
} else {
path = '/'
}
return path
},
/**
* Extract query parameters, dealing with bare parameters and parameters with
* no value as appropriate:
*
* 'var1&var2=value' is not necessarily the same as 'var1=&var2=value'
*
* In my world, one is an assertion of presence, and the other is an empty
* variable. Some web frameworks behave this way as well, so don't lose
* information.
*
* @param {string} requestURL The URL to be parsed.
* @returns {object} The parameters parsed from the request
*/
parseParameters: function parseParameters(requestURL) {
var parsed = requestURL
if (typeof requestURL === 'string') {
parsed = url.parse(requestURL, true)
}
var parameters = {}
if (parsed.query) {
var keys = Object.keys(parsed.query)
for (var i = 0, l = keys.length; i < l; ++i) {
var key = keys[i]
if (parsed.query[key] === '' && parsed.path.indexOf(key + '=') === -1) {
parameters[key] = true
} else {
parameters[key] = parsed.query[key]
}
}
}
return parameters
},
/**
* Performs the logic of `urltils.scrub` and `urltils.parseParameters` with
* only a single parse of the given URL.
*
* @param {string} requestURL - The URL to scrub and extra parameters from.
*
* @return {object} An object containing the scrubbed url at `.path` and the
* parsed parameters at `.parameters`.
*/
scrubAndParseParameters: function scrubAndParseParameters(requestURL) {
if (typeof requestURL === 'string') {
requestURL = url.parse(requestURL, true)
}
return {
path: this.scrub(requestURL),
parameters: this.parseParameters(requestURL)
}
},
/**
* Copy a set of request parameters from one object to another, following
* a few important rules:
*
* 1. Do not copy a parameter if it's in config.ignored_params.
* 2. Do not overwrite any existing parameters in destination, including
* parameters set to null or undefined.
*
* @param {Config} config Configuration, where `ignored_params` is
* guaranteed to be an Array.
* @param {object} source Parameters to be copied (not changed).
* @param {object} destination Dictionary to which parameters are copied
* (mutated in place).
*/
copyParameters: function copyParameters(config, source, destination) {
if (!(config && config.capture_params && source && destination)) return
var keys = Object.keys(source)
for (var i = 0; i < keys.length; i++) {
var key = keys[i]
if (config.ignored_params.indexOf(key) === -1 && !(key in destination)) {
destination[key] = source[key]
}
}
},
/**
* Copy a set of request parameters from one object to another.
* Existing attributes on the `destination` will be overwritten.
* Unlike `copyParameters`, this function will operate when
* `capture_params` is not enabled.
*
* @param {Config} config Configuration, where `ignored_params` is
* guaranteed to be an Array.
* @param {object} source Parameters to be copied (not changed).
* @param {object} destination Dictionary to which parameters are copied
* (mutated in place).
*/
overwriteParameters: function overwriteParameters(config, source, destination) {
if (!(config && source && destination)) return
var keys = Object.keys(source)
for (var i = 0; i < keys.length; i++) {
var key = keys[i]
if (config.ignored_params.indexOf(key) === -1) {
destination[key] = source[key]
}
}
}
}
/**
 * Checks whether an HTTP status code appears in the error collector's
 * configured ignore list.
 *
 * @param {Config} config Agent configuration (may be absent).
 * @param {string|number} code HTTP status code.
 * @return {bool} True when the code is configured to be ignored.
 */
function isIgnoredStatusCodeForErrors(config, code) {
  var errorCollector = config && config.error_collector
  var codes = (errorCollector && errorCollector.ignore_status_codes) || []
  return codes.indexOf(parseInt(code, 10)) >= 0
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| obfuscate.js | 73.53% | (25 / 34) | 0% | (0 / 4) | 57.14% | (4 / 7) | 75.76% | (25 / 33) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 1 26 1 4 1 4 | 'use strict'
module.exports = obfuscate

// Literal-matching expressions; each also tolerates an unterminated
// literal running to the end of the statement.
var singleQuote = /'(?:[^']|'')*?(?:\\'.*|'(?!'))/
var doubleQuote = /"(?:[^"]|"")*?(?:\\".*|"(?!"))/
// PostgreSQL dollar-quoted strings ($tag$ ... $tag$).
var dollarQuote = /(\$(?!\d)[^$]*?\$).*?(?:\1|$)/
// Oracle alternative quoting: q'[...]', q'{...}', q'<...>', q'(...)'.
var oracleQuote = /q'\[.*?(?:\]'|$)|q'\{.*?(?:\}'|$)|q'\<.*?(?:\>'|$)|q'\(.*?(?:\)'|$)/
// Single-line comments ('#' or '--') up to end of line.
var comment = /(?:#|--).*?(?=\r|\n|$)/
var multilineComment = /\/\*(?:[^/]|\/[^*])*?(?:\*\/|\/\*.*)/
var uuid = /\{?(?:[0-9a-f]\-*){32}\}?/
var hex = /0x[0-9a-f]+/
var boolean = /true|false|null/
var number = /\b-?(?:[0-9]+\.)?[0-9]+([eE][+-]?[0-9]+)?/

// Per-dialect pipeline: first mask all literal forms, then collapse the
// whole statement to '?' if any quote/comment delimiter remains unmatched.
var dialects = {}
dialects.mysql = [
  replacer(join(
    [doubleQuote, singleQuote, comment, multilineComment, hex, boolean, number],
    'gi'
  )),
  unmatchedPairs(/'|"|\/\*|\*\//)
]
dialects.postgres = [
  replacer(join(
    [dollarQuote, singleQuote, comment, multilineComment, uuid, boolean, number],
    'gi'
  )),
  unmatchedPairs(/'|\/\*|\*\/|\$/)
]
dialects.cassandra = [
  replacer(join(
    [singleQuote, comment, multilineComment, uuid, hex, boolean, number],
    'gi'
  )),
  unmatchedPairs(/'|\/\*|\*\//)
]
dialects.oracle = [
  replacer(join(
    [oracleQuote, singleQuote, comment, multilineComment, number],
    'gi'
  )),
  unmatchedPairs(/'|\/\*|\*\//)
]
/**
 * Replaces literal values in a SQL statement with `?` so queries can be
 * reported without leaking data.
 *
 * @param {string} raw Raw SQL text.
 * @param {string} dialect One of: mysql, postgres, cassandra, oracle.
 * @return {string} Obfuscated SQL (or '?' if it cannot be cleaned safely).
 * @throws {Error} When the dialect is not recognized.
 */
function obfuscate(raw, dialect) {
  var replacers = dialects[dialect]
  if (!replacers) throw new Error('Unknown sql implementation')
  var obfuscated = raw
  replacers.forEach(function applyReplacer(replace) {
    obfuscated = replace(obfuscated)
  })
  return obfuscated
}
/**
 * Combines several RegExp bodies into one alternation.
 *
 * @param {Array} expressions RegExps to combine.
 * @param {string} flags Flags for the combined expression.
 * @return {RegExp} The combined pattern.
 */
function join(expressions, flags) {
  var parts = expressions.map(toPart)
  return new RegExp(parts.join('|'), flags)
}
/**
 * Strips the surrounding slashes from a RegExp literal's string form so
 * it can be embedded in a larger alternation.
 *
 * @param {RegExp} expressions Source expression.
 * @return {string} The pattern body without delimiters.
 */
function toPart(expressions) {
  var literal = expressions.toString()
  return literal.slice(1, -1)
}
/**
 * Builds a function that masks every match of `regex` with '?'.
 *
 * @param {RegExp} regex Pattern for literals to mask.
 * @return {Function} sql -> masked sql
 */
function replacer(regex) {
  return function replace(sql) {
    return sql.replace(regex, '?')
  }
}
/**
 * Builds a checker that collapses the whole statement to '?' when it
 * still contains a quote or comment delimiter after replacement — a
 * sign the obfuscation pass may have failed to mask everything.
 *
 * @param {RegExp} regex Pattern matching leftover delimiters.
 * @return {Function} sql -> sql (unchanged) or '?'
 */
function unmatchedPairs(regex) {
  return function check(sql) {
    if (regex.test(sql)) return '?'
    return sql
  }
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| index.js | 15.6% | (17 / 109) | 3.13% | (2 / 64) | 0% | (0 / 12) | 18.09% | (17 / 94) |
var Writable = require('readable-stream').Writable
var inherits = require('inherits')

// Fall back to a typed-array shim on platforms without Uint8Array.
// (Fix: restores an `if` garbled to `Iif` by an istanbul coverage report.)
if (typeof Uint8Array === 'undefined') {
  var U8 = require('typedarray').Uint8Array
} else {
  var U8 = Uint8Array
}
/**
 * Writable stream that buffers everything written to it and hands the
 * concatenated body to `cb` on finish.
 *
 * @param {object} [opts] Options; `encoding` forces the output type.
 * @param {Function} [cb] Receives the concatenated body.
 */
function ConcatStream(opts, cb) {
  if (!(this instanceof ConcatStream)) return new ConcatStream(opts, cb)

  // Allow calling with just a callback.
  if (typeof opts === 'function') {
    cb = opts
    opts = {}
  }
  if (!opts) opts = {}

  var encoding = opts.encoding
  var shouldInferEncoding = false
  if (encoding) {
    encoding = String(encoding).toLowerCase()
    // Normalize the uint8array aliases.
    if (encoding === 'u8' || encoding === 'uint8') {
      encoding = 'uint8array'
    }
  } else {
    shouldInferEncoding = true
  }

  Writable.call(this, { objectMode: true })
  this.encoding = encoding
  this.shouldInferEncoding = shouldInferEncoding
  if (cb) this.on('finish', function () { cb(this.getBody()) })
  this.body = []
}
module.exports = ConcatStream
inherits(ConcatStream, Writable)
// Buffer each chunk as-is; concatenation happens lazily in getBody().
ConcatStream.prototype._write = function(chunk, enc, next) {
  this.body.push(chunk)
  next()
}
/**
 * Guesses the output encoding from the first buffered chunk (or the
 * given sample); defaults to 'buffer'.
 *
 * @param {*} [buff] Optional sample chunk.
 * @return {string} One of buffer|uint8array|array|string|object.
 */
ConcatStream.prototype.inferEncoding = function (buff) {
  var sample = buff === undefined ? this.body[0] : buff;
  if (Buffer.isBuffer(sample)) return 'buffer'
  if (typeof Uint8Array !== 'undefined' && sample instanceof Uint8Array) {
    return 'uint8array'
  }
  if (Array.isArray(sample)) return 'array'
  if (typeof sample === 'string') return 'string'
  if (Object.prototype.toString.call(sample) === "[object Object]") {
    return 'object'
  }
  return 'buffer'
}
/**
 * Concatenates everything written so far according to the configured or
 * inferred encoding.
 *
 * @return {*} The concatenated body.
 */
ConcatStream.prototype.getBody = function () {
  if (!this.encoding && this.body.length === 0) return []
  if (this.shouldInferEncoding) this.encoding = this.inferEncoding()
  switch (this.encoding) {
    case 'array': return arrayConcat(this.body)
    case 'string': return stringConcat(this.body)
    case 'buffer': return bufferConcat(this.body)
    case 'uint8array': return u8Concat(this.body)
    default: return this.body
  }
}
// Prefer native Array.isArray; fall back to a toString check on very old
// platforms.
var isArray = Array.isArray || function (arr) {
  return Object.prototype.toString.call(arr) == '[object Array]'
}
/**
 * True for anything whose toString tag ends in "Array]" — real arrays
 * and typed arrays alike.
 *
 * @param {*} arr Candidate value.
 * @return {boolean} Whether the value is array-like by tag.
 */
function isArrayish (arr) {
  var tag = Object.prototype.toString.call(arr)
  return /Array\]$/.test(tag)
}
// True(ish) for values that can be converted to bytes: strings,
// arrays/typed arrays, and anything exposing subarray(). Note: returns a
// truthy/falsy expression, not a strict boolean, and callers rely only on
// its truthiness.
function isBufferish (p) {
  return typeof p === 'string' || isArrayish(p) || (p && typeof p.subarray === 'function')
}
/**
 * Joins buffered parts into one string. When the first part is a
 * Buffer, parts are concatenated as Buffers and decoded as UTF-8;
 * otherwise they are joined as strings.
 *
 * (new Buffer() is kept deliberately: this module supports pre-Buffer.from
 * platforms, as shown by its typedarray shim.)
 *
 * @param {Array} parts Buffered chunks.
 * @return {string} Concatenated body.
 */
function stringConcat (parts) {
  var strings = []
  for (var i = 0; i < parts.length; i++) {
    var p = parts[i]
    if (typeof p === 'string' || Buffer.isBuffer(p)) {
      strings.push(p)
    } else if (isBufferish(p)) {
      strings.push(new Buffer(p))
    } else {
      strings.push(new Buffer(String(p)))
    }
  }
  if (Buffer.isBuffer(parts[0])) {
    return Buffer.concat(strings).toString('utf8')
  }
  return strings.join('')
}
/**
 * Concatenates buffered parts into a single Buffer, coercing non-buffer
 * parts first. (new Buffer() kept for legacy-platform compatibility.)
 *
 * @param {Array} parts Buffered chunks.
 * @return {Buffer} Concatenated body.
 */
function bufferConcat (parts) {
  var bufs = parts.map(function toBuffer(p) {
    if (Buffer.isBuffer(p)) return p
    if (isBufferish(p)) return new Buffer(p)
    return new Buffer(String(p))
  })
  return Buffer.concat(bufs)
}
/**
 * Flattens buffered array parts into one array.
 *
 * @param {Array} parts Array chunks.
 * @return {Array} Flattened body.
 */
function arrayConcat (parts) {
  var res = []
  for (var i = 0; i < parts.length; i++) {
    var part = parts[i]
    for (var j = 0; j < part.length; j++) {
      res.push(part[j])
    }
  }
  return res
}
/**
 * Concatenates buffered parts into a single Uint8Array (or the shim),
 * converting string parts to bytes first. Note: mutates `parts` in place
 * when converting strings, exactly as before.
 *
 * @param {Array} parts Buffered chunks.
 * @return {Uint8Array} Concatenated body.
 */
function u8Concat (parts) {
  var total = 0
  for (var i = 0; i < parts.length; i++) {
    if (typeof parts[i] === 'string') {
      parts[i] = new Buffer(parts[i])
    }
    total += parts[i].length
  }
  // Copy every byte into one contiguous typed array.
  var u8 = new U8(total)
  var offset = 0
  for (var j = 0; j < parts.length; j++) {
    var part = parts[j]
    for (var k = 0; k < part.length; k++) {
      u8[offset++] = part[k]
    }
  }
  return u8
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| inherits.js | 66.67% | (4 / 6) | 50% | (1 / 2) | 100% | (0 / 0) | 80% | (4 / 5) |
// Prefer Node's util.inherits; fall back to the pure-JS browser
// implementation when it is unavailable.
// (Fix: restores an `if` garbled to `Iif` by an istanbul coverage report.)
try {
  var util = require('util');
  if (typeof util.inherits !== 'function') throw '';
  module.exports = util.inherits;
} catch (e) {
  module.exports = require('./inherits_browser.js');
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| readable.js | 91.67% | (11 / 12) | 57.14% | (4 / 7) | 100% | (1 / 1) | 91.67% | (11 / 12) |
// Grab the core stream module if available (wrapped so browserify doesn't
// see a static require and create a circular dependency).
var Stream = (function (){
  try {
    return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify
  } catch(_){}
}());
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = Stream || exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
// Opt-out switch: fall back to core streams when requested via env var.
// (Fix: restores an `if` garbled to `Iif` by an istanbul coverage report.)
if (!process.browser && process.env.READABLE_STREAM === 'disable' && Stream) {
  module.exports = Stream;
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| _stream_duplex.js | 43.59% | (17 / 39) | 13.64% | (3 / 22) | 0% | (0 / 5) | 48.48% | (16 / 33) | |
| _stream_passthrough.js | 63.64% | (7 / 11) | 0% | (0 / 2) | 0% | (0 / 2) | 70% | (7 / 10) | |
| _stream_readable.js | 12.97% | (72 / 555) | 1.14% | (4 / 352) | 1.79% | (1 / 56) | 14.78% | (72 / 487) | |
| _stream_transform.js | 16.67% | (13 / 78) | 0% | (0 / 42) | 0% | (0 / 11) | 19.4% | (13 / 67) | |
| _stream_writable.js | 17.45% | (52 / 298) | 5.03% | (8 / 159) | 5.71% | (2 / 35) | 19.19% | (52 / 271) |
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
// (Fix: this whole file had been collapsed onto two lines by the
// coverage-report extraction; conventional formatting restored, with no
// token changes.)

'use strict';

/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) {
    keys.push(key);
  }
  return keys;
};
/*</replacement>*/

module.exports = Duplex;

/*<replacement>*/
var processNextTick = require('process-nextick-args');
/*</replacement>*/

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');

util.inherits(Duplex, Readable);

// Copy every Writable prototype method Readable didn't already provide.
var keys = objectKeys(Writable.prototype);
for (var v = 0; v < keys.length; v++) {
  var method = keys[v];
  if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
}

function Duplex(options) {
  if (!(this instanceof Duplex)) return new Duplex(options);

  Readable.call(this, options);
  Writable.call(this, options);

  if (options && options.readable === false) this.readable = false;

  if (options && options.writable === false) this.writable = false;

  this.allowHalfOpen = true;
  if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;

  this.once('end', onend);
}

// the no-half-open enforcer
function onend() {
  // if we allow half-open state, or if the writable side ended,
  // then we're ok.
  if (this.allowHalfOpen || this._writableState.ended) return;

  // no more data can be written.
  // But allow more writes to happen in this tick.
  processNextTick(onEndNT, this);
}

function onEndNT(self) {
  self.end();
}

function forEach(xs, f) {
  for (var i = 0, l = xs.length; i < l; i++) {
    f(xs[i], i);
  }
}
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
'use strict';
module.exports = PassThrough;
var Transform = require('./_stream_transform');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(PassThrough, Transform);
function PassThrough(options) {
  if (!(this instanceof PassThrough)) return new PassThrough(options);
  Transform.call(this, options);
}
// Identity transform: forward each chunk unchanged.
PassThrough.prototype._transform = function (chunk, encoding, cb) {
  cb(null, chunk);
};
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
'use strict';

module.exports = Readable;

/*<replacement>*/
var processNextTick = require('process-nextick-args');
/*</replacement>*/

/*<replacement>*/
var isArray = require('isarray');
/*</replacement>*/

/*<replacement>*/
var Duplex;
/*</replacement>*/

Readable.ReadableState = ReadableState;

/*<replacement>*/
var EE = require('events').EventEmitter;

// Count the listeners registered for one event type.
var EElistenerCount = function (emitter, type) {
  return emitter.listeners(type).length;
};
/*</replacement>*/

/*<replacement>*/
var Stream;
(function () {
  try {
    Stream = require('st' + 'ream');
  } catch (_) {} finally {
    // Fix: restores an `if` garbled to `Iif` by an istanbul coverage report.
    if (!Stream) Stream = require('events').EventEmitter;
  }
})();
/*</replacement>*/

var Buffer = require('buffer').Buffer;
/*<replacement>*/
var bufferShim = require('buffer-shims');
/*</replacement>*/

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

/*<replacement>*/
var debugUtil = require('util');
var debug = void 0;
// Fix: restores an `if` garbled to `Eif` by an istanbul coverage report.
if (debugUtil && debugUtil.debuglog) {
  debug = debugUtil.debuglog('stream');
} else {
  debug = function () {};
}
/*</replacement>*/

var BufferList = require('./internal/streams/BufferList');
var StringDecoder;

util.inherits(Readable, Stream);
function prependListener(emitter, event, fn) {
  // Sadly this is not cacheable as some libraries bundle their own
  // event emitter implementation with them.
  if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);

  // Fallback hack for Node versions without prependListener(): splice our
  // handler directly into the emitter's private _events table so it runs
  // before any userland listeners. NEVER DO THIS elsewhere; the goal is to
  // eventually remove this path.
  var existing = emitter._events && emitter._events[event];
  if (!existing) {
    emitter.on(event, fn);
  } else if (isArray(existing)) {
    existing.unshift(fn);
  } else {
    emitter._events[event] = [fn, existing];
  }
}
// Per-stream state bag for Readable. Everything the rest of this file
// mutates — buffered data, flow-control flags, and pipe bookkeeping —
// lives here, hanging off stream._readableState.
function ReadableState(options, stream) {
Duplex = Duplex || require('./_stream_duplex');
options = options || {};
// object stream flag. Used to make read(n) ignore n and to
// make all the buffer merging and length checks go away
this.objectMode = !!options.objectMode;
// a Duplex may configure its readable half independently
if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
// the point at which it stops calling _read() to fill the buffer
// Note: 0 is a valid value, means "don't call _read preemptively ever"
var hwm = options.highWaterMark;
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
this.highWaterMark = ~ ~this.highWaterMark;
// A linked list is used to store data chunks instead of an array because the
// linked list can remove elements from the beginning faster than
// array.shift()
this.buffer = new BufferList();
// total buffered size: bytes normally, chunk count in objectMode
this.length = 0;
// pipe() destinations: null, a single stream, or an array (see pipe/unpipe)
this.pipes = null;
this.pipesCount = 0;
// null until pipe()/resume()/'data' pick a mode; then true (flowing) or
// false (explicitly paused)
this.flowing = null;
// ended: push(null) has been seen; endEmitted: 'end' was actually emitted
this.ended = false;
this.endEmitted = false;
// a _read() call is currently in flight
this.reading = false;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// whenever we return null, then we set a flag to say
// that we're awaiting a 'readable' event emission.
this.needReadable = false;
this.emittedReadable = false;
this.readableListening = false;
this.resumeScheduled = false;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// when piping, we only care about 'readable' events that happen
// after read()ing all the bytes and not getting any pushback.
this.ranOut = false;
// the number of writers that are awaiting a drain event in .pipe()s
this.awaitDrain = 0;
// if true, a maybeReadMore has been scheduled
this.readingMore = false;
// StringDecoder instance and its encoding; set lazily below (or via
// setEncoding) only when an encoding is requested
this.decoder = null;
this.encoding = null;
if (options.encoding) {
if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
this.decoder = new StringDecoder(options.encoding);
this.encoding = options.encoding;
}
}
// Readable stream constructor. Works with or without `new`; accepts the
// usual options (highWaterMark, encoding, objectMode, read, ...) which are
// interpreted by ReadableState.
function Readable(options) {
Duplex = Duplex || require('./_stream_duplex');
// guard so Readable(opts) and new Readable(opts) behave the same
if (!(this instanceof Readable)) return new Readable(options);
this._readableState = new ReadableState(options, this);
// legacy
this.readable = true;
// allow _read to be supplied as an option instead of via subclassing
if (options && typeof options.read === 'function') this._read = options.read;
Stream.call(this);
}
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable.prototype.push = function (chunk, encoding) {
  var state = this._readableState;
  // In binary mode a string chunk is converted to a Buffer up front unless
  // its encoding already matches the stream's decoder encoding.
  if (typeof chunk === 'string' && !state.objectMode) {
    var enc = encoding || state.defaultEncoding;
    if (enc !== state.encoding) {
      return readableAddChunk(this, state, bufferShim.from(chunk, enc), '', false);
    }
    return readableAddChunk(this, state, chunk, enc, false);
  }
  return readableAddChunk(this, state, chunk, encoding, false);
};
// Unshift should *always* be something directly out of read():
// it returns a chunk to the front of the internal buffer, with no
// encoding conversion.
Readable.prototype.unshift = function (chunk) {
  return readableAddChunk(this, this._readableState, chunk, '', true);
};
// True only when the stream has been explicitly paused (flowing === false);
// a brand-new stream (flowing === null) does not count as paused.
Readable.prototype.isPaused = function () {
  var state = this._readableState;
  return state.flowing === false;
};
// Core ingestion path shared by push() (addToFront=false) and unshift()
// (addToFront=true). Validates the chunk, handles the EOF marker (null),
// decodes and buffers (or directly emits) the data, and returns whether
// the caller may keep pushing (see needMoreData).
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var er = chunkInvalid(state, chunk);
if (er) {
stream.emit('error', er);
} else if (chunk === null) {
// push(null) is the EOF signal
state.reading = false;
onEofChunk(stream, state);
} else if (state.objectMode || chunk && chunk.length > 0) {
if (state.ended && !addToFront) {
var e = new Error('stream.push() after EOF');
stream.emit('error', e);
} else if (state.endEmitted && addToFront) {
var _e = new Error('stream.unshift() after end event');
stream.emit('error', _e);
} else {
var skipAdd;
// pushed (not unshifted) raw chunks go through the decoder when one is set
if (state.decoder && !addToFront && !encoding) {
chunk = state.decoder.write(chunk);
skipAdd = !state.objectMode && chunk.length === 0;
}
if (!addToFront) state.reading = false;
// Don't add to the buffer if we've decoded to an empty string chunk and
// we're not in object mode
if (!skipAdd) {
// if we want the data now, just emit it.
if (state.flowing && state.length === 0 && !state.sync) {
stream.emit('data', chunk);
stream.read(0);
} else {
// update the buffer info.
state.length += state.objectMode ? 1 : chunk.length;
if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
if (state.needReadable) emitReadable(stream);
}
}
maybeReadMore(stream, state);
}
} else if (!addToFront) {
// zero-length chunk in binary mode: nothing to buffer, but the _read call
// that produced it has completed
state.reading = false;
}
return needMoreData(state);
}
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
// such as the repl. Also, if the push() triggered a
// readable event, and the user called read(largeNumber) such that
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
  if (state.ended) return false;
  if (state.needReadable) return true;
  return state.length < state.highWaterMark || state.length === 0;
}
// backwards compatibility: switch the stream to emitting decoded strings in
// the given encoding instead of Buffers. Chainable.
Readable.prototype.setEncoding = function (enc) {
  // lazy-load the decoder module on first use
  if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
  var state = this._readableState;
  state.decoder = new StringDecoder(enc);
  state.encoding = enc;
  return this;
};
// Don't raise the hwm > 8MB
var MAX_HWM = 0x800000;
// Round n up to the next power of two (capped at MAX_HWM) so the high
// water mark doesn't creep upward in tiny increments.
function computeNewHighWaterMark(n) {
  if (n >= MAX_HWM) return MAX_HWM;
  // classic bit-smearing trick: fill every bit below the highest set bit,
  // then add one to reach the next power of two
  n--;
  for (var shift = 1; shift <= 16; shift <<= 1) n |= n >>> shift;
  return n + 1;
}
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
// Decide how many bytes (or objects) read(n) should actually return given
// the current buffer state.
function howMuchToRead(n, state) {
  if (n <= 0 || (state.ended && state.length === 0)) return 0;
  if (state.objectMode) return 1;
  // n !== n means NaN: read() was called without a (usable) size
  if (n !== n) {
    // Only flow one buffer at a time
    return state.flowing && state.length ? state.buffer.head.data.length : state.length;
  }
  // If we're asking for more than the current hwm, then raise the hwm.
  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
  if (n <= state.length) return n;
  if (state.ended) return state.length;
  // Don't have enough buffered yet: flag that a 'readable' is wanted.
  state.needReadable = true;
  return 0;
}
// you can override either this method, or the async _read(n) below.
// Pull up to n bytes (or objects) from the stream. n is coerced via
// parseInt; a NaN/undefined n means "a sensible amount" (see howMuchToRead).
// read(0) is used internally to trigger state updates and 'readable' events
// without consuming data. Returns the data, or null when nothing can be
// returned right now.
Readable.prototype.read = function (n) {
debug('read', n);
n = parseInt(n, 10);
var state = this._readableState;
// remember the requested amount; n is rewritten below
var nOrig = n;
if (n !== 0) state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
// the 'readable' event and move on.
if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
debug('read: emitReadable', state.length, state.ended);
if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
return null;
}
n = howMuchToRead(n, state);
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
if (state.length === 0) endReadable(this);
return null;
}
// All the actual chunk generation logic needs to be
// *below* the call to _read. The reason is that in certain
// synthetic stream cases, such as passthrough streams, _read
// may be a completely synchronous operation which may change
// the state of the read buffer, providing enough data when
// before there was *not* enough.
//
// So, the steps are:
// 1. Figure out what the state of things will be after we do
// a read from the buffer.
//
// 2. If that resulting state will trigger a _read, then call _read.
// Note that this may be asynchronous, or synchronous. Yes, it is
// deeply ugly to write APIs this way, but that still doesn't mean
// that the Readable class should behave improperly, as streams are
// designed to be sync/async agnostic.
// Take note if the _read call is sync or async (ie, if the read call
// has returned yet), so that we know whether or not it's safe to emit
// 'readable' etc.
//
// 3. Actually pull the requested chunks out of the buffer and return.
// if we need a readable event, then we need to do some reading.
var doRead = state.needReadable;
debug('need readable', doRead);
// if we currently have less than the highWaterMark, then also read some
if (state.length === 0 || state.length - n < state.highWaterMark) {
doRead = true;
debug('length less than watermark', doRead);
}
// however, if we've ended, then there's no point, and if we're already
// reading, then it's unnecessary.
if (state.ended || state.reading) {
doRead = false;
debug('reading or ended', doRead);
} else if (doRead) {
debug('do read');
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
if (state.length === 0) state.needReadable = true;
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
// If _read pushed data synchronously, then `reading` will be false,
// and we need to re-evaluate how much data we can return to the user.
if (!state.reading) n = howMuchToRead(nOrig, state);
}
var ret;
if (n > 0) ret = fromList(n, state);else ret = null;
if (ret === null) {
state.needReadable = true;
n = 0;
} else {
state.length -= n;
}
if (state.length === 0) {
// If we have nothing in the buffer, then we want to know
// as soon as we *do* get something into the buffer.
if (!state.ended) state.needReadable = true;
// If we tried to read() past the EOF, then emit end on the next tick.
if (nOrig !== n && state.ended) endReadable(this);
}
// 'data' is emitted only when something was actually returned
if (ret !== null) this.emit('data', ret);
return ret;
};
// Validate a chunk handed to push()/unshift(). Returns a TypeError to emit,
// or null when the chunk is acceptable.
function chunkInvalid(state, chunk) {
  // anything goes in object mode; otherwise only strings, Buffers, and the
  // null/undefined EOF markers are acceptable
  if (state.objectMode) return null;
  if (chunk === null || chunk === undefined) return null;
  if (typeof chunk === 'string' || Buffer.isBuffer(chunk)) return null;
  return new TypeError('Invalid non-string/buffer chunk');
}
// Called when push(null) signals EOF: flush any bytes still held by the
// string decoder, mark the stream ended, and emit 'readable' so consumers
// pick up the final state.
function onEofChunk(stream, state) {
  if (state.ended) return;
  var decoder = state.decoder;
  if (decoder) {
    // the decoder may be holding a partial multi-byte character
    var tail = decoder.end();
    if (tail && tail.length) {
      state.buffer.push(tail);
      state.length += state.objectMode ? 1 : tail.length;
    }
  }
  state.ended = true;
  // emit 'readable' now to make sure it gets picked up.
  emitReadable(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
var state = stream._readableState;
state.needReadable = false;
// emittedReadable de-duplicates: at most one 'readable' per quiet period
if (!state.emittedReadable) {
debug('emitReadable', state.flowing);
state.emittedReadable = true;
// defer to the next tick only while inside a synchronous _read
if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream);
}
}
// Actually emit 'readable', then restart the flow loop in case a 'data'
// consumer is attached.
function emitReadable_(stream) {
debug('emit readable');
stream.emit('readable');
flow(stream);
}
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore(stream, state) {
// readingMore de-duplicates scheduling across ticks
if (!state.readingMore) {
state.readingMore = true;
processNextTick(maybeReadMore_, stream, state);
}
}
// nextTick half of maybeReadMore: keep issuing read(0) calls (each of which
// may trigger _read) until the buffer reaches the high water mark or a read
// produces no new data.
function maybeReadMore_(stream, state) {
var len = state.length;
while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
debug('maybeReadMore read 0');
stream.read(0);
if (len === state.length)
// didn't get any data, stop spinning.
break;else len = state.length;
}
state.readingMore = false;
}
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
// The default implementation reports a usage error: constructing a Readable
// without supplying _read (via subclass or options.read) is a programming
// mistake.
Readable.prototype._read = function (n) {
this.emit('error', new Error('_read() is not implemented'));
};
// Connect this readable to a writable destination. Handles multiple
// destinations, backpressure via 'drain'/awaitDrain, and automatic teardown
// on 'error'/'close'/'finish'/'unpipe'. Returns dest so pipes can be
// chained.
Readable.prototype.pipe = function (dest, pipeOpts) {
var src = this;
var state = this._readableState;
// state.pipes is kept as null / single stream / array so the common
// one-destination case allocates no array
switch (state.pipesCount) {
case 0:
state.pipes = dest;
break;
case 1:
state.pipes = [state.pipes, dest];
break;
default:
state.pipes.push(dest);
break;
}
state.pipesCount += 1;
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
// never end the process's stdout/stderr; otherwise end dest on source end
// unless opts.end === false
var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
var endFn = doEnd ? onend : cleanup;
if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn);
dest.on('unpipe', onunpipe);
function onunpipe(readable) {
debug('onunpipe');
// only react when *this* source is the one being unpiped from dest
if (readable === src) {
cleanup();
}
}
function onend() {
debug('onend');
dest.end();
}
// when the dest drains, it reduces the awaitDrain counter
// on the source. This would be more elegant with a .once()
// handler in flow(), but adding and removing repeatedly is
// too slow.
var ondrain = pipeOnDrain(src);
dest.on('drain', ondrain);
var cleanedUp = false;
function cleanup() {
debug('cleanup');
// cleanup event handlers once the pipe is broken
dest.removeListener('close', onclose);
dest.removeListener('finish', onfinish);
dest.removeListener('drain', ondrain);
dest.removeListener('error', onerror);
dest.removeListener('unpipe', onunpipe);
src.removeListener('end', onend);
src.removeListener('end', cleanup);
src.removeListener('data', ondata);
cleanedUp = true;
// if the reader is waiting for a drain event from this
// specific writer, then it would cause it to never start
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
}
// If the user pushes more data while we're writing to dest then we'll end up
// in ondata again. However, we only want to increase awaitDrain once because
// dest will only emit one 'drain' event for the multiple writes.
// => Introduce a guard on increasing awaitDrain.
var increasedAwaitDrain = false;
src.on('data', ondata);
function ondata(chunk) {
debug('ondata');
increasedAwaitDrain = false;
var ret = dest.write(chunk);
if (false === ret && !increasedAwaitDrain) {
// If the user unpiped during `dest.write()`, it is possible
// to get stuck in a permanently paused state if that write
// also returned false.
// => Check whether `dest` is still a piping destination.
if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
debug('false write response, pause', src._readableState.awaitDrain);
src._readableState.awaitDrain++;
increasedAwaitDrain = true;
}
src.pause();
}
}
// if the dest has an error, then stop piping into it.
// however, don't suppress the throwing behavior for this.
function onerror(er) {
debug('onerror', er);
unpipe();
dest.removeListener('error', onerror);
// re-emit only if nobody else is listening, preserving throw-on-error
if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
}
// Make sure our error handler is attached before userland ones.
prependListener(dest, 'error', onerror);
// Both close and finish should trigger unpipe, but only once.
function onclose() {
dest.removeListener('finish', onfinish);
unpipe();
}
dest.once('close', onclose);
function onfinish() {
debug('onfinish');
dest.removeListener('close', onclose);
unpipe();
}
dest.once('finish', onfinish);
function unpipe() {
debug('unpipe');
src.unpipe(dest);
}
// tell the dest that it's being piped to
dest.emit('pipe', src);
// start the flow if it hasn't been started already.
if (!state.flowing) {
debug('pipe resume');
src.resume();
}
return dest;
};
// Builds the 'drain' handler attached to each pipe destination: every drain
// decrements the source's awaitDrain counter, and once all destinations
// have drained (counter hits 0, and someone is still consuming 'data') the
// source starts flowing again.
function pipeOnDrain(src) {
  return function () {
    var state = src._readableState;
    debug('pipeOnDrain', state.awaitDrain);
    if (state.awaitDrain > 0) state.awaitDrain--;
    var shouldFlow = state.awaitDrain === 0 && EElistenerCount(src, 'data');
    if (shouldFlow) {
      state.flowing = true;
      flow(src);
    }
  };
}
// Detach one destination (or all, when dest is omitted) previously attached
// with pipe(). Emits 'unpipe' on each removed destination, which triggers
// the cleanup handler installed by pipe().
Readable.prototype.unpipe = function (dest) {
var state = this._readableState;
// if we're not piping anywhere, then do nothing.
if (state.pipesCount === 0) return this;
// just one destination. most common case.
if (state.pipesCount === 1) {
// passed in one, but it's not the right one.
if (dest && dest !== state.pipes) return this;
if (!dest) dest = state.pipes;
// got a match.
state.pipes = null;
state.pipesCount = 0;
state.flowing = false;
if (dest) dest.emit('unpipe', this);
return this;
}
// slow case. multiple pipe destinations.
if (!dest) {
// remove all.
var dests = state.pipes;
var len = state.pipesCount;
state.pipes = null;
state.pipesCount = 0;
state.flowing = false;
for (var i = 0; i < len; i++) {
dests[i].emit('unpipe', this);
}return this;
}
// try to find the right one.
var index = indexOf(state.pipes, dest);
if (index === -1) return this;
state.pipes.splice(index, 1);
state.pipesCount -= 1;
// collapse back to the single-destination representation
if (state.pipesCount === 1) state.pipes = state.pipes[0];
dest.emit('unpipe', this);
return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function (ev, fn) {
var res = Stream.prototype.on.call(this, ev, fn);
if (ev === 'data') {
// Start flowing on next tick if stream isn't explicitly paused
if (this._readableState.flowing !== false) this.resume();
} else if (ev === 'readable') {
var state = this._readableState;
// only the first 'readable' listener (before end) arms the machinery
if (!state.endEmitted && !state.readableListening) {
state.readableListening = state.needReadable = true;
state.emittedReadable = false;
if (!state.reading) {
// not currently reading: schedule a read(0) to prime the stream
processNextTick(nReadingNextTick, this);
} else if (state.length) {
// data already buffered: announce it
emitReadable(this, state);
}
}
}
return res;
};
Readable.prototype.addListener = Readable.prototype.on;
// Scheduled on the next tick when the first 'readable' listener is added
// while no _read is in flight: a read(0) primes the stream without
// consuming any data.
function nReadingNextTick(self) {
debug('readable nexttick read 0');
self.read(0);
}
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function () {
  var state = this._readableState;
  // already flowing: nothing to do
  if (state.flowing) return this;
  debug('resume');
  state.flowing = true;
  resume(this, state);
  return this;
};
// Schedule the actual resume work for the next tick, at most once
// (resumeScheduled), so the 'resume' event is always emitted
// asynchronously.
function resume(stream, state) {
if (!state.resumeScheduled) {
state.resumeScheduled = true;
processNextTick(resume_, stream, state);
}
}
// nextTick half of resume(): prime the stream, reset the drain counter,
// emit 'resume', and start the flow loop.
function resume_(stream, state) {
if (!state.reading) {
debug('resume read 0');
stream.read(0);
}
state.resumeScheduled = false;
// a resumed stream no longer waits for any destination to drain
state.awaitDrain = 0;
stream.emit('resume');
flow(stream);
// flow() may have stopped with no read in flight; kick once more
if (state.flowing && !state.reading) stream.read(0);
}
// Switch the stream into explicitly-paused mode. Chainable; emits 'pause'
// only on an actual transition.
Readable.prototype.pause = function () {
  debug('call pause flowing=%j', this._readableState.flowing);
  var state = this._readableState;
  if (state.flowing !== false) {
    debug('pause');
    state.flowing = false;
    this.emit('pause');
  }
  return this;
};
// Drain the stream synchronously while it is in flowing mode: each read()
// emits a 'data' event; a null return (or leaving flowing mode) stops the
// loop.
function flow(stream) {
  var state = stream._readableState;
  debug('flow', state.flowing);
  for (;;) {
    if (!state.flowing) break;
    if (stream.read() === null) break;
  }
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
// Bridges the legacy stream's 'data'/'end' events into push(), proxies its
// other methods and key events onto this stream, and pauses/resumes the
// source to honor backpressure.
Readable.prototype.wrap = function (stream) {
var state = this._readableState;
var paused = false;
var self = this;
stream.on('end', function () {
debug('wrapped end');
if (state.decoder && !state.ended) {
// flush any partial multi-byte character held by the decoder
var chunk = state.decoder.end();
if (chunk && chunk.length) self.push(chunk);
}
// signal EOF downstream
self.push(null);
});
stream.on('data', function (chunk) {
debug('wrapped data');
if (state.decoder) chunk = state.decoder.write(chunk);
// don't skip over falsy values in objectMode
if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
var ret = self.push(chunk);
if (!ret) {
// buffer is full: stop the legacy source until _read resumes it
paused = true;
stream.pause();
}
});
// proxy all the other methods.
// important when wrapping filters and duplexes.
for (var i in stream) {
if (this[i] === undefined && typeof stream[i] === 'function') {
// IIFE captures the method name for the forwarding closure
this[i] = function (method) {
return function () {
return stream[method].apply(stream, arguments);
};
}(i);
}
}
// proxy certain important events.
var events = ['error', 'close', 'destroy', 'pause', 'resume'];
forEach(events, function (ev) {
stream.on(ev, self.emit.bind(self, ev));
});
// when we try to consume some more bytes, simply unpause the
// underlying stream.
self._read = function (n) {
debug('wrapped _read', n);
if (paused) {
paused = false;
stream.resume();
}
};
return self;
};
// exposed for testing purposes only: the test suite reaches in through
// Readable._fromList; this is not public API.
Readable._fromList = fromList;
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function fromList(n, state) {
  // nothing buffered
  if (state.length === 0) return null;

  if (state.objectMode) return state.buffer.shift();

  if (!n || n >= state.length) {
    // Consume everything: a single string join when decoding, the lone
    // chunk when there is exactly one, or a flattened Buffer otherwise.
    var all;
    if (state.decoder) {
      all = state.buffer.join('');
    } else if (state.buffer.length === 1) {
      all = state.buffer.head.data;
    } else {
      all = state.buffer.concat(state.length);
    }
    state.buffer.clear();
    return all;
  }

  // read part of list
  return fromListPartial(n, state.buffer, state.decoder);
}
// Extracts only enough buffered data to satisfy the amount requested.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function fromListPartial(n, list, hasStrings) {
  var head = list.head.data;
  if (n < head.length) {
    // slice is the same for buffers and strings
    var ret = head.slice(0, n);
    list.head.data = head.slice(n);
    return ret;
  }
  if (n === head.length) {
    // first chunk is a perfect match
    return list.shift();
  }
  // result spans more than one buffer
  return hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
}
// Copies a specified amount of characters from the list of buffered data
// chunks. Precondition (per fromListPartial): n is greater than the head
// chunk's length, so at least two nodes are involved.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function copyFromBufferString(n, list) {
  // the head chunk is always consumed whole
  var node = list.head;
  var consumed = 1;
  var ret = node.data;
  n -= ret.length;
  while ((node = node.next)) {
    var str = node.data;
    var take = n > str.length ? str.length : n;
    ret += take === str.length ? str : str.slice(0, take);
    n -= take;
    if (n === 0) {
      if (take === str.length) {
        // this node was exhausted too: unlink it
        ++consumed;
        if (node.next) list.head = node.next;
        else list.head = list.tail = null;
      } else {
        // partially consumed: keep the remainder as the new head
        list.head = node;
        node.data = str.slice(take);
      }
      break;
    }
    ++consumed;
  }
  list.length -= consumed;
  return ret;
}
// Copies a specified amount of bytes from the list of buffered data chunks.
// Buffer counterpart of copyFromBufferString: allocates the result up front
// and copies into it while unlinking fully-consumed nodes. Precondition
// (per fromListPartial): n exceeds the head chunk's length.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function copyFromBuffer(n, list) {
var ret = bufferShim.allocUnsafe(n);
var p = list.head;
var c = 1;
p.data.copy(ret);
n -= p.data.length;
while (p = p.next) {
var buf = p.data;
var nb = n > buf.length ? buf.length : n;
// write offset into ret is (ret.length - n): the part not yet filled
buf.copy(ret, ret.length - n, 0, nb);
n -= nb;
if (n === 0) {
if (nb === buf.length) {
// node fully consumed: unlink it
++c;
if (p.next) list.head = p.next;else list.head = list.tail = null;
} else {
// node partially consumed: keep the remainder as the new head
list.head = p;
p.data = buf.slice(nb);
}
break;
}
++c;
}
// c nodes were removed from the list
list.length -= c;
return ret;
}
// Begin the end-of-stream sequence. Must only be called once every buffered
// byte has been consumed; the 'end' event itself is deferred to the next
// tick (endReadableNT) so one final unshift() can still cancel it.
function endReadable(stream) {
  var state = stream._readableState;
  // If we get here before consuming all the bytes, then that is a
  // bug in node. Should never happen.
  if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
  if (state.endEmitted) return;
  state.ended = true;
  processNextTick(endReadableNT, state, stream);
}
// nextTick half of endReadable(): actually emits 'end', unless a late
// unshift() refilled the buffer in the meantime.
function endReadableNT(state, stream) {
  // Check that we didn't get one last unshift.
  var canEnd = !state.endEmitted && state.length === 0;
  if (!canEnd) return;
  state.endEmitted = true;
  stream.readable = false;
  stream.emit('end');
}
// Minimal Array#forEach stand-in: invokes f(element, index) for each entry.
function forEach(xs, f) {
  var len = xs.length;
  for (var i = 0; i < len; i++) f(xs[i], i);
}
// Minimal Array#indexOf stand-in using strict equality; returns -1 when
// x is not found.
function indexOf(xs, x) {
  for (var i = 0; i < xs.length; i++) {
    if (xs[i] === x) return i;
  }
  return -1;
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 | 1 1 1 1 1 1 1 1 1 1 1 1 1 | // a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
'use strict';
module.exports = Transform;
var Duplex = require('./_stream_duplex');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(Transform, Duplex);
// Per-stream state for Transform: tracks the chunk currently being
// transformed and the pending write callback.
function TransformState(stream) {
  // bound trampoline handed to _transform() as its completion callback
  this.afterTransform = function (er, data) {
    return afterTransform(stream, er, data);
  };
  this.needTransform = false;  // readable side wants data but none is queued
  this.transforming = false;   // a _transform() call is in flight
  this.writecb = null;         // pending _write callback
  this.writechunk = null;      // pending written chunk awaiting transform
  this.writeencoding = null;   // encoding of writechunk
}
// Completion callback for _transform(): finishes the pending write (cb),
// pushes any produced data, and re-triggers _read if the readable side
// still wants more.
function afterTransform(stream, er, data) {
var ts = stream._transformState;
ts.transforming = false;
var cb = ts.writecb;
// a missing writecb means the _transform callback was invoked twice
if (!cb) return stream.emit('error', new Error('no writecb in Transform class'));
ts.writechunk = null;
ts.writecb = null;
// null/undefined data means "no output for this chunk"
if (data !== null && data !== undefined) stream.push(data);
cb(er);
var rs = stream._readableState;
rs.reading = false;
// keep the pipeline moving while the readable side is below its hwm
if (rs.needReadable || rs.length < rs.highWaterMark) {
stream._read(rs.highWaterMark);
}
}
// Transform stream constructor. Works with or without `new`. Accepts the
// Duplex options plus `transform` and `flush` implementations, which may
// be supplied here instead of on a subclass.
function Transform(options) {
if (!(this instanceof Transform)) return new Transform(options);
Duplex.call(this, options);
this._transformState = new TransformState(this);
var stream = this;
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
if (options) {
if (typeof options.transform === 'function') this._transform = options.transform;
if (typeof options.flush === 'function') this._flush = options.flush;
}
// When the writable side finishes, then flush out anything remaining.
this.once('prefinish', function () {
if (typeof this._flush === 'function') this._flush(function (er, data) {
done(stream, er, data);
});else done(stream);
});
}
// Push transformed output to the readable side. Clearing needTransform
// records that the readable side has been fed, so _read() need not force
// another transform immediately.
Transform.prototype.push = function (chunk, encoding) {
  var ts = this._transformState;
  ts.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
//
// The default implementation throws: a Transform with no _transform
// (via subclass or options.transform) is unusable.
Transform.prototype._transform = function (chunk, encoding, cb) {
throw new Error('_transform() is not implemented');
};
// Writable-side plumbing: park the incoming chunk and its callback on the
// transform state, then kick _read() if the readable side is hungry so the
// chunk gets transformed right away.
Transform.prototype._write = function (chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;
  if (ts.transforming) return;
  var rs = this._readableState;
  var hungry = ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark;
  if (hungry) this._read(rs.highWaterMark);
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function (n) {
  var ts = this._transformState;
  var ready = ts.writechunk !== null && ts.writecb && !ts.transforming;
  if (!ready) {
    // mark that we need a transform, so that any data that comes in
    // will get processed, now that we've asked for it.
    ts.needTransform = true;
    return;
  }
  ts.transforming = true;
  this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
};
// Final flush bookkeeping for a Transform stream: surface any flush error,
// push trailing data, sanity-check that nothing is still buffered or in
// flight, then push null to signal EOF on the readable side.
function done(stream, er, data) {
  if (er) return stream.emit('error', er);
  if (data !== null && data !== undefined) stream.push(data);
  // if there's nothing left in the write buffer, nothing more will ever
  // be provided — it is an internal error to get here otherwise.
  var writableState = stream._writableState;
  var transformState = stream._transformState;
  if (writableState.length) throw new Error('Calling transform done when ws.length != 0');
  if (transformState.transforming) throw new Error('Calling transform done when still transforming');
  return stream.push(null);
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | // A bit simpler than readable streams.
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
// the drain event emission and buffering.
'use strict';
module.exports = Writable;
/*<replacement>*/
var processNextTick = require('process-nextick-args');
/*</replacement>*/
/*<replacement>*/
var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : processNextTick;
/*</replacement>*/
/*<replacement>*/
var Duplex;
/*</replacement>*/
Writable.WritableState = WritableState;
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
/*<replacement>*/
var internalUtil = {
deprecate: require('util-deprecate')
};
/*</replacement>*/
/*<replacement>*/
var Stream;
// Lazily resolve the Stream base class; in environments where the 'stream'
// module is unavailable (browser bundles), fall back to a bare
// EventEmitter. The 'st' + 'ream' concatenation keeps naive bundlers from
// statically inlining the module.
// NOTE(review): the extracted text read "Iif" on the fallback line — an
// istanbul coverage-report artifact; the real keyword is "if".
(function () {
  try {
    Stream = require('st' + 'ream');
  } catch (_) {} finally {
    if (!Stream) Stream = require('events').EventEmitter;
  }
})();
/*</replacement>*/
var Buffer = require('buffer').Buffer;
/*<replacement>*/
var bufferShim = require('buffer-shims');
/*</replacement>*/
util.inherits(Writable, Stream);
function nop() {}
// A single queued write request: the chunk, its encoding, the
// user-supplied callback, and a link to the next pending request (the
// write buffer is a singly linked list of these nodes).
function WriteReq(chunk, encoding, cb) {
this.chunk = chunk;
this.encoding = encoding;
this.callback = cb;
this.next = null;
}
/**
 * Per-stream bag of writable-side state. Constructed once per Writable
 * (or Duplex) instance.
 *
 * @param {Object} options - user constructor options (may be falsy)
 * @param {Writable} stream - the owning stream; captured by the shared
 *   onwrite callback below
 */
function WritableState(options, stream) {
Duplex = Duplex || require('./_stream_duplex');
options = options || {};
// object stream flag to indicate whether or not this stream
// contains buffers or objects.
this.objectMode = !!options.objectMode;
if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
var hwm = options.highWaterMark;
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
// (double bitwise-NOT truncates toward zero)
this.highWaterMark = ~ ~this.highWaterMark;
// drain event flag.
this.needDrain = false;
// at the start of calling end()
this.ending = false;
// when end() has been called, and returned
this.ended = false;
// when 'finish' is emitted
this.finished = false;
// should we decode strings into buffers before passing to _write?
// this is here so that some node-core streams can optimize string
// handling at a lower level.
var noDecode = options.decodeStrings === false;
this.decodeStrings = !noDecode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// not an actual buffer we keep track of, but a measurement
// of how much we're waiting to get pushed to some underlying
// socket or file.
this.length = 0;
// a flag to see when we're in the middle of a write.
this.writing = false;
// when true all writes will be buffered until .uncork() call
// (a counter, not a boolean: cork() may nest)
this.corked = 0;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// a flag to know if we're processing previously buffered items, which
// may call the _write() callback in the same tick, so that we don't
// end up in an overlapped onwrite situation.
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
// (shared closure over `stream`; reused for every write)
this.onwrite = function (er) {
onwrite(stream, er);
};
// the callback that the user supplies to write(chunk,encoding,cb)
this.writecb = null;
// the amount that is being written when _write is called.
this.writelen = 0;
// head / tail of the linked list of pending WriteReq nodes
this.bufferedRequest = null;
this.lastBufferedRequest = null;
// number of pending user-supplied write callbacks
// this must be 0 before 'finish' can be emitted
this.pendingcb = 0;
// emit prefinish if the only thing we're waiting for is _write cbs
// This is relevant for synchronous Transform streams
this.prefinished = false;
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
// count buffered requests
this.bufferedRequestCount = 0;
// allocate the first CorkedRequest, there is always
// one allocated and free to use, and we maintain at most two
this.corkedRequestsFree = new CorkedRequest(this);
}
// Snapshot the pending write queue (a linked list of WriteReq nodes) as an
// array, oldest first. Consumed by the deprecated `.buffer` getter.
WritableState.prototype.getBuffer = function getBuffer() {
  var out = [];
  for (var node = this.bufferedRequest; node; node = node.next) {
    out.push(node);
  }
  return out;
};
// Back-compat shim: expose the old `_writableState.buffer` property as a
// deprecated alias of getBuffer(). Wrapped in try/catch because
// Object.defineProperty may fail in exotic/legacy environments.
(function () {
try {
Object.defineProperty(WritableState.prototype, 'buffer', {
get: internalUtil.deprecate(function () {
return this.getBuffer();
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.')
});
} catch (_) {}
})();
// Test _writableState for inheritance to account for Duplex streams,
// whose prototype chain only points to Readable.
// NOTE(review): the extracted text read "Eif" on the feature-test line —
// an istanbul coverage-report artifact; the real keyword is "if".
var realHasInstance;
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
  realHasInstance = Function.prototype[Symbol.hasInstance];
  Object.defineProperty(Writable, Symbol.hasInstance, {
    value: function (object) {
      // A real (sub)instance passes the native check; otherwise duck-type
      // on the presence of a genuine WritableState.
      if (realHasInstance.call(this, object)) return true;
      return object && object._writableState instanceof WritableState;
    }
  });
} else {
  // No Symbol.hasInstance support: plain instanceof is the best we can do.
  realHasInstance = function (object) {
    return object instanceof this;
  };
}
/**
 * Writable stream constructor. Also applied to Duplex instances, which is
 * why the guard below cannot use a plain `instanceof` check.
 *
 * @param {Object} [options] - may supply `write`/`writev` implementations
 *   directly instead of subclassing
 */
function Writable(options) {
Duplex = Duplex || require('./_stream_duplex');
// Writable ctor is applied to Duplexes, too.
// `realHasInstance` is necessary because using plain `instanceof`
// would return false, as no `_writableState` property is attached.
// Trying to use the custom `instanceof` for Writable here will also break the
// Node.js LazyTransform implementation, which has a non-trivial getter for
// `_writableState` that would lead to infinite recursion.
if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
return new Writable(options);
}
this._writableState = new WritableState(options, this);
// legacy.
this.writable = true;
if (options) {
if (typeof options.write === 'function') this._write = options.write;
if (typeof options.writev === 'function') this._writev = options.writev;
}
Stream.call(this);
}
// Writable streams are sinks: piping *from* one is a programmer error, so
// fail loudly instead of silently allowing it.
Writable.prototype.pipe = function () {
  this.emit('error', new Error('Cannot pipe, not readable'));
};
// Reject a write() issued after end(): emit 'error' synchronously, then
// fail the user callback on the next tick.
function writeAfterEnd(stream, cb) {
  var er = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', er);
  processNextTick(cb, er);
}
// Validate a chunk before writing. Outside object mode only buffers,
// strings, and undefined are acceptable; null is never writable. Chunk
// length is otherwise treated uniformly (objects count as length 1), so
// the watermark logic is unaffected by this check. On a bad chunk, emit
// 'error', fail the callback next tick, and report invalid.
function validChunk(stream, state, chunk, cb) {
  var er = null;
  if (chunk === null) {
    // Always an error, regardless of object mode.
    er = new TypeError('May not write null values to stream');
  } else {
    var acceptable = Buffer.isBuffer(chunk) || typeof chunk === 'string' || chunk === undefined || state.objectMode;
    if (!acceptable) er = new TypeError('Invalid non-string/buffer chunk');
  }
  if (!er) return true;
  stream.emit('error', er);
  processNextTick(cb, er);
  return false;
}
Writable.prototype.write = function (chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
if (typeof cb !== 'function') cb = nop;
if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) {
state.pendingcb++;
ret = writeOrBuffer(this, state, chunk, encoding, cb);
}
return ret;
};
// Increase the cork depth; while corked (depth > 0), writes are buffered
// instead of being handed to _write. Calls may nest — see uncork().
Writable.prototype.cork = function () {
  this._writableState.corked++;
};
// Undo one cork(); once fully uncorked — and not otherwise busy writing,
// finishing, or already processing the buffer — drain any writes that were
// queued while corked.
Writable.prototype.uncork = function () {
  var state = this._writableState;
  if (!state.corked) return;
  state.corked--;
  var idle = !state.writing && !state.corked && !state.finished && !state.bufferProcessing;
  if (idle && state.bufferedRequest) clearBuffer(this, state);
};
// Set the encoding assumed for string chunks when write() is called
// without an explicit encoding. Throws a TypeError on unknown encodings.
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
  // node::ParseEncoding() requires lower case.
  if (typeof encoding === 'string') encoding = encoding.toLowerCase();
  var known = ['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'];
  if (known.indexOf((encoding + '').toLowerCase()) === -1) throw new TypeError('Unknown encoding: ' + encoding);
  this._writableState.defaultEncoding = encoding;
  return this;
};
// Convert a string chunk to a Buffer when the stream decodes strings;
// buffers, object-mode chunks, and decodeStrings:false pass through as-is.
function decodeChunk(state, chunk, encoding) {
  var shouldDecode = !state.objectMode && state.decodeStrings !== false && typeof chunk === 'string';
  return shouldDecode ? bufferShim.from(chunk, encoding) : chunk;
}
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
//
// Returns true while the buffered length stays under the high water mark.
// Buffered entries are appended to the WriteReq linked list
// (bufferedRequest..lastBufferedRequest); a write in flight or a corked
// stream always buffers.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
chunk = decodeChunk(state, chunk, encoding);
// decodeChunk may have turned a string into a Buffer — re-tag it.
if (Buffer.isBuffer(chunk)) encoding = 'buffer';
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
if (!ret) state.needDrain = true;
if (state.writing || state.corked) {
var last = state.lastBufferedRequest;
state.lastBufferedRequest = new WriteReq(chunk, encoding, cb);
if (last) {
last.next = state.lastBufferedRequest;
} else {
state.bufferedRequest = state.lastBufferedRequest;
}
state.bufferedRequestCount += 1;
} else {
doWrite(stream, state, false, len, chunk, encoding, cb);
}
return ret;
}
// Hand one write (or a batched writev) to the underlying implementation,
// recording the in-flight bookkeeping first. `state.sync` stays true only
// while the _write/_writev call itself is on the stack, which is how
// onwrite later detects a synchronous (re-entrant) completion.
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
  state.writelen = len;
  state.writecb = cb;
  state.writing = true;
  state.sync = true;
  if (writev) {
    stream._writev(chunk, state.onwrite);
  } else {
    stream._write(chunk, encoding, state.onwrite);
  }
  state.sync = false;
}
// Report a failed write: settle the user callback (deferred to next tick
// when the failure happened synchronously inside _write), record that the
// error was surfaced, then emit 'error'.
function onwriteError(stream, state, sync, er, cb) {
  --state.pendingcb;
  if (sync) {
    processNextTick(cb, er);
  } else {
    cb(er);
  }
  stream._writableState.errorEmitted = true;
  stream.emit('error', er);
}
// Clear the in-flight write bookkeeping once a _write call has settled:
// the written bytes come off `length`, then `writelen` is reset.
function onwriteStateUpdate(state) {
  state.writing = false;
  state.writecb = null;
  state.length -= state.writelen;
  state.writelen = 0;
}
/**
 * Completion handler for every _write/_writev (invoked via
 * state.onwrite). Captures sync/writecb *before* resetting the in-flight
 * state, then either reports the error or continues the pipeline. When
 * the completion happened synchronously inside _write (state.sync), the
 * continuation is deferred via asyncWrite to avoid re-entrancy.
 */
function onwrite(stream, er) {
var state = stream._writableState;
var sync = state.sync;
var cb = state.writecb;
onwriteStateUpdate(state);
if (er) onwriteError(stream, state, sync, er, cb);else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(state);
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
clearBuffer(stream, state);
}
if (sync) {
/*<replacement>*/
asyncWrite(afterWrite, stream, state, finished, cb);
/*</replacement>*/
} else {
afterWrite(stream, state, finished, cb);
}
}
}
// Post-write continuation: possibly emit 'drain', settle the user's write
// callback, then check whether the stream can now finish.
function afterWrite(stream, state, finished, cb) {
  if (!finished) onwriteDrain(stream, state);
  state.pendingcb--;
  cb();
  finishMaybe(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
  if (state.length !== 0 || !state.needDrain) return;
  state.needDrain = false;
  stream.emit('drain');
}
// if there's something in the buffer waiting, then process it
//
// Two paths: when the stream implements _writev and more than one request
// is queued, the entire queue is flushed as one batch through a
// CorkedRequest holder; otherwise requests are written one at a time,
// stopping as soon as a write goes asynchronous (state.writing).
function clearBuffer(stream, state) {
state.bufferProcessing = true;
var entry = state.bufferedRequest;
if (stream._writev && entry && entry.next) {
// Fast case, write everything using _writev()
var l = state.bufferedRequestCount;
var buffer = new Array(l);
var holder = state.corkedRequestsFree;
holder.entry = entry;
var count = 0;
while (entry) {
buffer[count] = entry;
entry = entry.next;
count += 1;
}
// holder.finish will settle every queued callback when _writev completes.
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
// doWrite is almost always async, defer these to save a bit of time
// as the hot path ends with doWrite
state.pendingcb++;
state.lastBufferedRequest = null;
// recycle a free CorkedRequest, or allocate a fresh one (at most two
// ever exist per stream).
if (holder.next) {
state.corkedRequestsFree = holder.next;
holder.next = null;
} else {
state.corkedRequestsFree = new CorkedRequest(state);
}
} else {
// Slow case, write chunks one-by-one
while (entry) {
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite(stream, state, false, len, chunk, encoding, cb);
entry = entry.next;
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
break;
}
}
if (entry === null) state.lastBufferedRequest = null;
}
state.bufferedRequestCount = 0;
state.bufferedRequest = entry;
state.bufferProcessing = false;
}
// Abstract default: concrete writables must override _write (or supply
// options.write); otherwise every write fails through its callback.
Writable.prototype._write = function (chunk, encoding, cb) {
  cb(new Error('_write() is not implemented'));
};
// Optional batched-write hook; null means "not supported".
Writable.prototype._writev = null;
/**
 * Signal that no more data will be written. Accepts the usual optional
 * (chunk, encoding, cb) permutations, writes a final chunk if given,
 * fully uncorks the stream, then transitions it toward 'finish'.
 */
Writable.prototype.end = function (chunk, encoding, cb) {
var state = this._writableState;
// end(cb) form: chunk slot actually holds the callback.
if (typeof chunk === 'function') {
cb = chunk;
chunk = null;
encoding = null;
} else if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
// .end() fully uncorks
// (collapse any nested cork depth to 1 so a single uncork() drains)
if (state.corked) {
state.corked = 1;
this.uncork();
}
// ignore unnecessary end() calls.
if (!state.ending && !state.finished) endWritable(this, state, cb);
};
// A stream may finish only once end() was called and every queued and
// in-flight write has fully drained (and 'finish' hasn't fired already).
function needFinish(state) {
  if (!state.ending || state.finished) return false;
  return state.length === 0 && state.bufferedRequest === null && !state.writing;
}
// Emit 'prefinish' exactly once per stream, just ahead of 'finish'.
function prefinish(stream, state) {
  if (state.prefinished) return;
  state.prefinished = true;
  stream.emit('prefinish');
}
// Emit 'finish' when the stream is fully drained; 'prefinish' always
// precedes it. If user write callbacks are still pending, only
// 'prefinish' fires now — 'finish' follows once pendingcb hits zero.
// Returns whether the stream was ready to finish.
function finishMaybe(stream, state) {
  var need = needFinish(state);
  if (need) {
    prefinish(stream, state);
    if (state.pendingcb === 0) {
      state.finished = true;
      stream.emit('finish');
    }
  }
  return need;
}
// Transition the stream into its ending state, attempt to finish
// immediately, and arrange for `cb` to run on 'finish' (next tick if the
// stream already finished synchronously).
function endWritable(stream, state, cb) {
  state.ending = true;
  finishMaybe(stream, state);
  if (cb) {
    if (state.finished) {
      processNextTick(cb);
    } else {
      stream.once('finish', cb);
    }
  }
  state.ended = true;
  stream.writable = false;
}
// It looks like a linked list, but it isn't one: at most two of these
// holders exist per stream (see corkedRequestsFree). `finish` settles
// every user callback in the captured WriteReq chain with the batch
// result, then returns this holder to the free list for reuse.
function CorkedRequest(state) {
  var self = this;
  this.next = null;
  this.entry = null;
  this.finish = function (err) {
    var node = self.entry;
    self.entry = null;
    while (node) {
      state.pendingcb--;
      node.callback(err);
      node = node.next;
    }
    // Recycle this holder: chain it behind the current free one, or make
    // it the free one if none exists.
    if (state.corkedRequestsFree) {
      state.corkedRequestsFree.next = self;
    } else {
      state.corkedRequestsFree = self;
    }
  };
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| BufferList.js | 18.87% | (10 / 53) | 0% | (0 / 14) | 0% | (0 / 7) | 22.73% | (10 / 44) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 | 1 1 1 1 1 1 1 1 1 1 | 'use strict';
var Buffer = require('buffer').Buffer;
/*<replacement>*/
var bufferShim = require('buffer-shims');
/*</replacement>*/
module.exports = BufferList;
// Singly linked FIFO of data chunks with O(1) push/unshift/shift, used as
// a stream-side buffer.
function BufferList() {
  this.head = null;
  this.tail = null;
  this.length = 0;
}

// Append a chunk at the tail.
BufferList.prototype.push = function (v) {
  var node = { data: v, next: null };
  if (this.length === 0) {
    this.head = node;
  } else {
    this.tail.next = node;
  }
  this.tail = node;
  ++this.length;
};

// Prepend a chunk at the head.
BufferList.prototype.unshift = function (v) {
  var node = { data: v, next: this.head };
  if (this.length === 0) this.tail = node;
  this.head = node;
  ++this.length;
};

// Remove and return the head chunk; undefined when the list is empty.
BufferList.prototype.shift = function () {
  if (this.length === 0) return;
  var data = this.head.data;
  if (this.length === 1) {
    this.head = this.tail = null;
  } else {
    this.head = this.head.next;
  }
  --this.length;
  return data;
};

// Drop every chunk.
BufferList.prototype.clear = function () {
  this.head = this.tail = null;
  this.length = 0;
};

// Stringify all chunks, joined by `s`.
BufferList.prototype.join = function (s) {
  if (this.length === 0) return '';
  var node = this.head;
  var out = '' + node.data;
  while ((node = node.next) !== null) {
    out += s + node.data;
  }
  return out;
};

// Copy all chunks into a single Buffer of `n` bytes. Chunks must be
// Buffers except in the 0- and 1-element fast paths.
BufferList.prototype.concat = function (n) {
  if (this.length === 0) return bufferShim.alloc(0);
  if (this.length === 1) return this.head.data;
  var out = bufferShim.allocUnsafe(n >>> 0);
  var offset = 0;
  for (var node = this.head; node; node = node.next) {
    node.data.copy(out, offset);
    offset += node.data.length;
  }
  return out;
};
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| index.js | 10.81% | (8 / 74) | 1.59% | (1 / 63) | 0% | (0 / 4) | 10.81% | (8 / 74) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 | 1 1 1 1 1 1 1 1 | 'use strict';
var buffer = require('buffer');
var Buffer = buffer.Buffer;
var SlowBuffer = buffer.SlowBuffer;
var MAX_LEN = buffer.kMaxLength || 2147483647;
exports.alloc = function alloc(size, fill, encoding) {
if (typeof Buffer.alloc === 'function') {
return Buffer.alloc(size, fill, encoding);
}
if (typeof encoding === 'number') {
throw new TypeError('encoding must not be number');
}
if (typeof size !== 'number') {
throw new TypeError('size must be a number');
}
if (size > MAX_LEN) {
throw new RangeError('size is too large');
}
var enc = encoding;
var _fill = fill;
if (_fill === undefined) {
enc = undefined;
_fill = 0;
}
var buf = new Buffer(size);
if (typeof _fill === 'string') {
var fillBuf = new Buffer(_fill, enc);
var flen = fillBuf.length;
var i = -1;
while (++i < size) {
buf[i] = fillBuf[i % flen];
}
} else {
buf.fill(_fill);
}
return buf;
}
exports.allocUnsafe = function allocUnsafe(size) {
if (typeof Buffer.allocUnsafe === 'function') {
return Buffer.allocUnsafe(size);
}
if (typeof size !== 'number') {
throw new TypeError('size must be a number');
}
if (size > MAX_LEN) {
throw new RangeError('size is too large');
}
return new Buffer(size);
}
// Polyfill of Buffer.from: accepts a string, ArrayBuffer (with optional
// offset/length), Buffer, array / array-like, or a JSON-serialized Buffer
// ({type:'Buffer', data:[...]}). Numbers are rejected outright. The
// Uint8Array.from guard skips broken environments where Buffer.from is
// just the inherited typed-array method.
exports.from = function from(value, encodingOrOffset, length) {
if (typeof Buffer.from === 'function' && (!global.Uint8Array || Uint8Array.from !== Buffer.from)) {
return Buffer.from(value, encodingOrOffset, length);
}
if (typeof value === 'number') {
throw new TypeError('"value" argument must not be a number');
}
if (typeof value === 'string') {
return new Buffer(value, encodingOrOffset);
}
if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) {
// second/third args are (byteOffset, length) in this branch
var offset = encodingOrOffset;
if (arguments.length === 1) {
return new Buffer(value);
}
if (typeof offset === 'undefined') {
offset = 0;
}
var len = length;
if (typeof len === 'undefined') {
len = value.byteLength - offset;
}
if (offset >= value.byteLength) {
throw new RangeError('\'offset\' is out of bounds');
}
if (len > value.byteLength - offset) {
throw new RangeError('\'length\' is out of bounds');
}
return new Buffer(value.slice(offset, offset + len));
}
if (Buffer.isBuffer(value)) {
// copy, never alias, the source buffer
var out = new Buffer(value.length);
value.copy(out, 0, 0, value.length);
return out;
}
if (value) {
if (Array.isArray(value) || (typeof ArrayBuffer !== 'undefined' && value.buffer instanceof ArrayBuffer) || 'length' in value) {
return new Buffer(value);
}
if (value.type === 'Buffer' && Array.isArray(value.data)) {
return new Buffer(value.data);
}
}
throw new TypeError('First argument must be a string, Buffer, ' + 'ArrayBuffer, Array, or array-like object.');
}
// Polyfill of Buffer.allocUnsafeSlow: uninitialized memory outside the
// shared pool (SlowBuffer). Prefers the native implementation.
// NOTE(review): the size guard here is `>=` while alloc/allocUnsafe use
// `>` — possibly intentional upstream, but worth confirming against the
// buffer-shims source.
exports.allocUnsafeSlow = function allocUnsafeSlow(size) {
if (typeof Buffer.allocUnsafeSlow === 'function') {
return Buffer.allocUnsafeSlow(size);
}
if (typeof size !== 'number') {
throw new TypeError('size must be a number');
}
if (size >= MAX_LEN) {
throw new RangeError('size is too large');
}
return new SlowBuffer(size);
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| util.js | 63.83% | (30 / 47) | 0% | (0 / 12) | 0% | (0 / 15) | 63.83% | (30 / 47) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // NOTE: These type checking functions intentionally don't use `instanceof` // because it is fragile and can be easily faked with `Object.create()`. 
function isArray(arg) { if (Array.isArray) { return Array.isArray(arg); } return objectToString(arg) === '[object Array]'; } exports.isArray = isArray; function isBoolean(arg) { return typeof arg === 'boolean'; } exports.isBoolean = isBoolean; function isNull(arg) { return arg === null; } exports.isNull = isNull; function isNullOrUndefined(arg) { return arg == null; } exports.isNullOrUndefined = isNullOrUndefined; function isNumber(arg) { return typeof arg === 'number'; } exports.isNumber = isNumber; function isString(arg) { return typeof arg === 'string'; } exports.isString = isString; function isSymbol(arg) { return typeof arg === 'symbol'; } exports.isSymbol = isSymbol; function isUndefined(arg) { return arg === void 0; } exports.isUndefined = isUndefined; function isRegExp(re) { return objectToString(re) === '[object RegExp]'; } exports.isRegExp = isRegExp; function isObject(arg) { return typeof arg === 'object' && arg !== null; } exports.isObject = isObject; function isDate(d) { return objectToString(d) === '[object Date]'; } exports.isDate = isDate; function isError(e) { return (objectToString(e) === '[object Error]' || e instanceof Error); } exports.isError = isError; function isFunction(arg) { return typeof arg === 'function'; } exports.isFunction = isFunction; function isPrimitive(arg) { return arg === null || typeof arg === 'boolean' || typeof arg === 'number' || typeof arg === 'string' || typeof arg === 'symbol' || // ES6 symbol typeof arg === 'undefined'; } exports.isPrimitive = isPrimitive; exports.isBuffer = Buffer.isBuffer; function objectToString(o) { return Object.prototype.toString.call(o); } |
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| index.js | 66.67% | (2 / 3) | 50% | (1 / 2) | 0% | (0 / 1) | 66.67% | (2 / 3) |
| 1 2 3 4 5 6 7 | 1 1 | var toString = {}.toString;
module.exports = Array.isArray || function (arr) {
return toString.call(arr) == '[object Array]';
};
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| index.js | 13.64% | (3 / 22) | 28.57% | (4 / 14) | 0% | (0 / 5) | 13.64% | (3 / 22) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 | 1 1 1 | 'use strict'; Iif (!process.version || process.version.indexOf('v0.') === 0 || process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { module.exports = nextTick; } else { module.exports = process.nextTick; } function nextTick(fn, arg1, arg2, arg3) { if (typeof fn !== 'function') { throw new TypeError('"callback" argument must be a function'); } var len = arguments.length; var args, i; switch (len) { case 0: case 1: return process.nextTick(fn); case 2: return process.nextTick(function afterTickOne() { fn.call(null, arg1); }); case 3: return process.nextTick(function afterTickTwo() { fn.call(null, arg1, arg2); }); case 4: return process.nextTick(function afterTickThree() { fn.call(null, arg1, arg2, arg3); }); default: args = new Array(len - 1); i = 0; while (i < args.length) { args[i++] = arguments[i]; } return process.nextTick(function afterTick() { fn.apply(null, args); }); } } |
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| node.js | 100% | (1 / 1) | 100% | (0 / 0) | 100% | (0 / 0) | 100% | (1 / 1) |
| 1 2 3 4 5 6 7 8 | 1 |
/**
 * For Node.js, simply re-export the core `util.deprecate` function.
 * Wraps a function so its first call logs a one-time deprecation warning.
 */
module.exports = require('util').deprecate;
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| https-proxy-agent.js | 17.7% | (20 / 113) | 0% | (0 / 40) | 0% | (0 / 9) | 18.35% | (20 / 109) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 |
/**
* Module dependencies.
*/
var net = require('net');
var tls = require('tls');
var url = require('url');
var extend = require('extend');
var Agent = require('agent-base');
var inherits = require('util').inherits;
var debug = require('debug')('https-proxy-agent');
/**
* Module exports.
*/
module.exports = HttpsProxyAgent;
/**
* The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to the
* specified "HTTP(s) proxy server" in order to proxy HTTPS requests.
*
* @api public
*/
/**
 * HTTP Agent subclass that tunnels requests through an HTTP(S) proxy via
 * CONNECT.
 *
 * @param {String|Object} opts - proxy URL string (parsed with url.parse)
 *   or an options object with at least `host`/`port`; `protocol` selects
 *   TLS to the proxy, `secureEndpoint` (default true) selects TLS to the
 *   destination.
 */
function HttpsProxyAgent (opts) {
// guard against being called without `new`
if (!(this instanceof HttpsProxyAgent)) return new HttpsProxyAgent(opts);
if ('string' == typeof opts) opts = url.parse(opts);
if (!opts) throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');
debug('creating new HttpsProxyAgent instance: %j', opts);
Agent.call(this, connect);
// copy so the caller's object is never mutated below
var proxy = extend({}, opts);
// if `true`, then connect to the proxy server over TLS. defaults to `false`.
this.secureProxy = proxy.protocol ? /^https:?$/i.test(proxy.protocol) : false;
// if `true`, then connect to the destination endpoint over TLS, defaults to `true`
this.secureEndpoint = opts.secureEndpoint !== false;
// prefer `hostname` over `host`, and set the `port` if needed
proxy.host = proxy.hostname || proxy.host;
proxy.port = +proxy.port || (this.secureProxy ? 443 : 80);
if (proxy.host && proxy.path) {
// if both a `host` and `path` are specified then it's most likely the
// result of a `url.parse()` call... we need to remove the `path` portion so
// that `net.connect()` doesn't attempt to open that as a unix socket file.
delete proxy.path;
delete proxy.pathname;
}
this.proxy = proxy;
}
inherits(HttpsProxyAgent, Agent);
/**
* Default options for the "connect" opts object.
*/
var defaults = { port: 80 };
var secureDefaults = { port: 443 };
/**
 * Called when the node-core HTTP client library is creating a new HTTP request.
 *
 * Opens a socket to the proxy, issues a `CONNECT <host>:<port>` request, and
 * hands the resulting (possibly TLS-upgraded) socket back through `fn`.
 * Invoked with `this` bound to the HttpsProxyAgent instance (see
 * `Agent.call(this, connect)` in the constructor).
 *
 * @param {http.ClientRequest} req
 * @param {Object} _opts connect options for the destination endpoint
 * @param {Function} fn callback: `fn(err)` or `fn(null, socket)`
 * @api public
 */
function connect (req, _opts, fn) {
  var proxy = this.proxy;
  var secureProxy = this.secureProxy;
  var secureEndpoint = this.secureEndpoint;

  // create a socket connection to the proxy server (TLS when the proxy
  // itself was specified with an https: URL)
  var socket;
  if (secureProxy) {
    socket = tls.connect(proxy);
  } else {
    socket = net.connect(proxy);
  }

  // these `opts` are the connect options to connect to the destination endpoint
  // XXX: we mix in the proxy options so that TLS options like
  // `rejectUnauthorized` get passed to the destination endpoint as well
  var proxyOpts = extend({}, proxy);
  delete proxyOpts.host;
  delete proxyOpts.hostname;
  delete proxyOpts.port;
  var opts = extend({}, proxyOpts, secureEndpoint ? secureDefaults : defaults, _opts);

  // we need to buffer any HTTP traffic that happens with the proxy before we get
  // the CONNECT response, so that if the response is anything other than an "200"
  // response code, then we can re-play the "data" events on the socket once the
  // HTTP parser is hooked up...
  var buffers = [];
  var buffersLength = 0;

  // streams2-style read loop: pull whatever is buffered, re-arm on "readable"
  function read () {
    var b = socket.read();
    if (b) ondata(b);
    else socket.once('readable', read);
  }

  // detach every listener this function attached to the proxy socket
  function cleanup () {
    socket.removeListener('data', ondata);
    socket.removeListener('end', onend);
    socket.removeListener('error', onerror);
    socket.removeListener('close', onclose);
    socket.removeListener('readable', read);
  }

  function onclose (err) {
    debug('onclose had error', err);
  }

  function onend () {
    debug('onend');
  }

  function onerror (err) {
    cleanup();
    fn(err);
  }

  // accumulate proxy bytes until the full CONNECT response header
  // (terminated by CRLF CRLF) has arrived, then dispatch on the status code
  function ondata (b) {
    buffers.push(b);
    buffersLength += b.length;
    var buffered = Buffer.concat(buffers, buffersLength);
    var str = buffered.toString('ascii');
    if (!~str.indexOf('\r\n\r\n')) {
      // keep buffering
      debug('have not received end of HTTP headers yet...');
      if (socket.read) {
        read();
      } else {
        socket.once('data', ondata);
      }
      return;
    }
    var firstLine = str.substring(0, str.indexOf('\r\n'));
    var statusCode = +firstLine.split(' ')[1];
    debug('got proxy server response: "%s"', firstLine);
    if (200 == statusCode) {
      // 200 Connected status code!
      var sock = socket;

      // nullify the buffered data since we won't be needing it
      buffers = buffered = null;

      if (secureEndpoint) {
        // since the proxy is connecting to an SSL server, we have
        // to upgrade this socket connection to an SSL connection
        debug('upgrading proxy-connected socket to TLS connection: "%s"', opts.host);
        opts.socket = socket;
        opts.servername = opts.host;
        opts.host = null;
        opts.hostname = null;
        opts.port = null;
        sock = tls.connect(opts);
      }

      cleanup();
      fn(null, sock);
    } else {
      // some other status code that's not 200... need to re-play the HTTP header
      // "data" events onto the socket once the HTTP machinery is attached so that
      // the user can parse and handle the error status code
      cleanup();

      // save a reference to the concat'd Buffer for the `onsocket` callback
      buffers = buffered;

      // need to wait for the "socket" event to re-play the "data" events
      req.once('socket', onsocket);
      fn(null, socket);
    }
  }

  // replay the buffered proxy response (non-200 only) onto the socket once
  // the HTTP parser machinery has been hooked up for the user
  function onsocket (socket) {
    if ('function' == typeof socket.ondata) {
      // node <= v0.11.3, the `ondata` function is set on the socket
      socket.ondata(buffers, 0, buffers.length);
    } else if (socket.listeners('data').length > 0) {
      // node > v0.11.3, the "data" event is listened for directly
      socket.emit('data', buffers);
    } else {
      // never?
      throw new Error('should not happen...');
    }

    // nullify the cached Buffer instance
    buffers = null;
  }

  socket.on('error', onerror);
  socket.on('close', onclose);
  socket.on('end', onend);

  if (socket.read) {
    read();
  } else {
    socket.once('data', ondata);
  }

  // issue the CONNECT request for the destination host through the proxy
  var hostname = opts.host + ':' + opts.port;
  var msg = 'CONNECT ' + hostname + ' HTTP/1.1\r\n';
  var auth = proxy.auth;
  if (auth) {
    // NOTE(review): `new Buffer()` is deprecated in modern Node; kept as-is
    // because this module targets legacy runtimes — consider `Buffer.from`.
    msg += 'Proxy-Authorization: Basic ' + new Buffer(auth).toString('base64') + '\r\n';
  }
  msg += 'Host: ' + hostname + '\r\n' +
    'Connection: close\r\n' +
    '\r\n';
  socket.write(msg);
};
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| agent.js | 21.21% | (7 / 33) | 0% | (0 / 16) | 0% | (0 / 4) | 22.58% | (7 / 31) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 | 1 1 1 1 1 1 1 |
/**
* Module dependencies.
*/
var inherits = require('util').inherits;
var EventEmitter = require('events').EventEmitter;
/**
* Module exports.
*/
module.exports = Agent;
/**
 * Base `Agent` class. Stores the socket-creation `callback` that
 * `addRequest` will invoke for every request. Auto-instantiates when
 * called without `new`.
 *
 * @param {Function} connectFn invoked as `callback(req, opts, fn)`
 * @api public
 */
function Agent (connectFn) {
  if (!(this instanceof Agent)) {
    return new Agent(connectFn);
  }
  if (typeof connectFn !== 'function') {
    throw new Error('Must pass a "callback function"');
  }
  EventEmitter.call(this);
  this.callback = connectFn;
}
inherits(Agent, EventEmitter);
/**
 * Called by node-core's "_http_client.js" module when creating
 * a new HTTP request with this Agent instance.
 *
 * Normalizes the two historical call signatures into a single `opts`
 * object, then delegates socket creation to the user-supplied `callback`.
 *
 * @param {http.ClientRequest} req
 * @param {Object|String} host options object (>= v0.11.x) or hostname (<= v0.10.x)
 * @param {Number} port destination port (<= v0.10.x signature only)
 * @param {String} localAddress local interface to bind (<= v0.10.x signature only)
 * @api public
 */
Agent.prototype.addRequest = function (req, host, port, localAddress) {
  var opts;
  if ('object' == typeof host) {
    // >= v0.11.x API
    opts = host;
    if (opts.host && opts.path) {
      // if both a `host` and `path` are specified then it's most likely the
      // result of a `url.parse()` call... we need to remove the `path` portion so
      // that `net.connect()` doesn't attempt to open that as a unix socket file.
      // NOTE(review): this mutates the caller-owned options object.
      delete opts.path;
    }
  } else {
    // <= v0.10.x API
    opts = { host: host, port: port };
    if (null != localAddress) {
      opts.localAddress = localAddress;
    }
  }

  // hint to use "Connection: close"
  // XXX: non-documented `http` module API :(
  req._last = true;
  req.shouldKeepAlive = false;

  // create the `net.Socket` instance.
  // `sync` distinguishes a callback invoked during this very call (errors must
  // then be deferred to the next tick, because the caller has not received
  // `req` back yet) from one invoked asynchronously later.
  var sync = true;
  this.callback(req, opts, function (err, socket) {
    function emitErr () {
      req.emit('error', err);
      // For Safety. Some additional errors might fire later on
      // and we need to make sure we don't double-fire the error event.
      req._hadError = true;
    }
    if (err) {
      if (sync) {
        // need to defer the "error" event, when sync, because by now the `req`
        // instance hasn't event been passed back to the user yet...
        process.nextTick(emitErr);
      } else {
        emitErr();
      }
    } else {
      req.onSocket(socket);
    }
  });
  sync = false;
};
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| index.js | 20.83% | (10 / 48) | 0% | (0 / 64) | 0% | (0 / 5) | 20.83% | (10 / 48) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 | 1 1 1 1 1 1 1 1 1 1 | /**
* Helpers.
*/
// Time-unit multipliers, expressed in milliseconds.
var s = 1000
var m = s * 60
var h = m * 60
var d = h * 24
var y = d * 365.25
/**
* Parse or format the given `val`.
*
* Options:
*
* - `long` verbose formatting [false]
*
* @param {String|Number} val
* @param {Object} options
* @throws {Error} throw an error if val is not a non-empty string or a number
* @return {String|Number}
* @api public
*/
module.exports = function (val, options) {
options = options || {}
var type = typeof val
if (type === 'string' && val.length > 0) {
return parse(val)
} else if (type === 'number' && isNaN(val) === false) {
return options.long ?
fmtLong(val) :
fmtShort(val)
}
throw new Error('val is not a non-empty string or a valid number. val=' + JSON.stringify(val))
}
/**
 * Parse the given `str` and return milliseconds.
 *
 * Accepts a number with an optional unit suffix ("2 days", "1.5h", "100").
 * Returns `undefined` for unparseable or pathologically long input.
 *
 * @param {String} str
 * @return {Number}
 * @api private
 */
function parse(str) {
  str = String(str)
  // refuse absurdly long input before running the regex (DoS guard)
  if (str.length > 10000) {
    return
  }
  var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(str)
  if (!match) {
    return
  }
  var amount = parseFloat(match[1])
  var unit = (match[2] || 'ms').toLowerCase()
  // every unit alias the regex can capture, mapped to its millisecond factor
  var factors = {
    years: y, year: y, yrs: y, yr: y, y: y,
    days: d, day: d, d: d,
    hours: h, hour: h, hrs: h, hr: h, h: h,
    minutes: m, minute: m, mins: m, min: m, m: m,
    seconds: s, second: s, secs: s, sec: s, s: s,
    milliseconds: 1, millisecond: 1, msecs: 1, msec: 1, ms: 1
  }
  var factor = factors[unit]
  return factor === undefined ? undefined : amount * factor
}
/**
 * Short format for `ms` — e.g. "2d", "3h", "45m", "10s", "250ms".
 *
 * @param {Number} ms
 * @return {String}
 * @api private
 */
function fmtShort(ms) {
  // largest-first: the first threshold that fits wins
  var units = [[d, 'd'], [h, 'h'], [m, 'm'], [s, 's']]
  for (var i = 0; i < units.length; i++) {
    if (ms >= units[i][0]) {
      return Math.round(ms / units[i][0]) + units[i][1]
    }
  }
  return ms + 'ms'
}
/**
 * Long format for `ms` — e.g. "2 days", "1 hour", "250 ms".
 *
 * @param {Number} ms
 * @return {String}
 * @api private
 */
function fmtLong(ms) {
  // try each unit largest-first; `plural` yields undefined below one unit
  var units = [[d, 'day'], [h, 'hour'], [m, 'minute'], [s, 'second']]
  for (var i = 0; i < units.length; i++) {
    var formatted = plural(ms, units[i][0], units[i][1])
    if (formatted) {
      return formatted
    }
  }
  return ms + ' ms'
}
/**
 * Pluralization helper: formats `ms` in units of `n` as "1 day",
 * "3 hours", etc., or returns `undefined` when `ms` is below one unit.
 */
function plural(ms, n, name) {
  if (ms < n) {
    return
  }
  // below 1.5 units: round down and stay singular; otherwise round up + "s"
  return ms < n * 1.5
    ? Math.floor(ms / n) + ' ' + name
    : Math.ceil(ms / n) + ' ' + name + 's'
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 | 1 1 1 1 1 1 1 1 1 1 1 1 1 21 21 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 |
/**
 * This is the common logic for both the Node.js and web browser
 * implementations of `debug()`.
 *
 * Expose `debug()` as the module (also reachable as `.debug` and `.default`
 * for interop with both CommonJS and ES-module style consumers).
 */
exports = module.exports = createDebug.debug = createDebug['default'] = createDebug;
exports.coerce = coerce;
exports.disable = disable;
exports.enable = enable;
exports.enabled = enabled;
exports.humanize = require('ms');
/**
 * The currently active debug mode names, and names to skip.
 */
exports.names = [];
exports.skips = [];
/**
 * Map of special "%n" handling functions, for the debug "format" argument.
 *
 * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
 */
exports.formatters = {};
/**
 * Previous log timestamp. Shared by every debug instance so `diff`
 * measures the time since the last debug() call of any namespace.
 */
var prevTime;
/**
 * Select a color for `namespace` using a simple 32-bit string hash,
 * so the same namespace always maps to the same color.
 *
 * @param {String} namespace
 * @return {Number}
 * @api private
 */
function selectColor(namespace) {
  var hash = 0;
  for (var i = 0; i < namespace.length; i++) {
    hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
    hash |= 0; // Convert to 32bit integer
  }
  return exports.colors[Math.abs(hash) % exports.colors.length];
}
/**
 * Create a debugger with the given `namespace`.
 *
 * The returned function formats its arguments (applying any registered
 * `exports.formatters` for `%x` directives), stamps the time delta since
 * the previous call, and writes via `debug.log` / `exports.log`.
 *
 * @param {String} namespace
 * @return {Function}
 * @api public
 */
function createDebug(namespace) {

  function debug() {
    // disabled?
    if (!debug.enabled) return;

    var self = debug;

    // set `diff` timestamp: ms elapsed since the previous debug() call
    // (zero on the very first call, when `prevTime` is still undefined)
    var curr = +new Date();
    var ms = curr - (prevTime || curr);
    self.diff = ms;
    self.prev = prevTime;
    self.curr = curr;
    prevTime = curr;

    // turn the `arguments` into a proper Array
    var args = new Array(arguments.length);
    for (var i = 0; i < args.length; i++) {
      args[i] = arguments[i];
    }

    args[0] = exports.coerce(args[0]);

    if ('string' !== typeof args[0]) {
      // anything else let's inspect with %O
      args.unshift('%O');
    }

    // apply any `formatters` transformations
    var index = 0;
    args[0] = args[0].replace(/%([a-zA-Z%])/g, function(match, format) {
      // if we encounter an escaped % then don't increase the array index
      if (match === '%%') return match;
      index++;
      var formatter = exports.formatters[format];
      if ('function' === typeof formatter) {
        var val = args[index];
        match = formatter.call(self, val);

        // now we need to remove `args[index]` since it's inlined in the `format`
        args.splice(index, 1);
        index--;
      }
      return match;
    });

    // apply env-specific formatting (colors, etc.)
    exports.formatArgs.call(self, args);

    var logFn = debug.log || exports.log || console.log.bind(console);
    logFn.apply(self, args);
  }

  debug.namespace = namespace;
  debug.enabled = exports.enabled(namespace);
  debug.useColors = exports.useColors();
  debug.color = selectColor(namespace);

  // env-specific initialization logic for debug instances
  if ('function' === typeof exports.init) {
    exports.init(debug);
  }

  return debug;
}
/**
 * Enables a debug mode by namespaces. This can include modes
 * separated by a colon, comma, or whitespace, and wildcards (`*`).
 * A leading `-` excludes a namespace.
 *
 * @param {String} namespaces
 * @api public
 */
function enable(namespaces) {
  exports.save(namespaces);

  exports.names = [];
  exports.skips = [];

  (namespaces || '').split(/[\s,]+/).forEach(function (ns) {
    if (!ns) return; // ignore empty strings
    // wildcards become non-greedy regex fragments
    var pattern = ns.replace(/\*/g, '.*?');
    if (pattern[0] === '-') {
      exports.skips.push(new RegExp('^' + pattern.substr(1) + '$'));
    } else {
      exports.names.push(new RegExp('^' + pattern + '$'));
    }
  });
}
/**
 * Disable debug output.
 *
 * Implemented by enabling the empty namespace set, which clears both
 * `exports.names` and `exports.skips` and persists the cleared value
 * via `exports.save`.
 *
 * @api public
 */
function disable() {
  exports.enable('');
}
/**
 * Returns true if the given mode name is enabled, false otherwise.
 * An explicit skip pattern always wins over an enabling pattern.
 *
 * @param {String} name
 * @return {Boolean}
 * @api public
 */
function enabled(name) {
  function matches(re) {
    return re.test(name);
  }
  if (exports.skips.some(matches)) {
    return false;
  }
  return exports.names.some(matches);
}
/**
 * Coerce `val` for logging: Errors become their stack trace (falling back
 * to the message); everything else passes through untouched.
 *
 * @param {Mixed} val
 * @return {Mixed}
 * @api private
 */
function coerce(val) {
  if (!(val instanceof Error)) {
    return val;
  }
  return val.stack || val.message;
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 | 1 1 | /** * Detect Electron renderer process, which is node, but we should * treat as a browser. */ Iif (typeof process !== 'undefined' && process.type === 'renderer') { module.exports = require('./browser.js'); } else { module.exports = require('./node.js'); } |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 | 1 1 1 1 1 1 1 1 1 1 1 268 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | /**
* Module dependencies.
*/
var tty = require('tty');
var util = require('util');
/**
* This is the Node.js implementation of `debug()`.
*
* Expose `debug()` as the module.
*/
exports = module.exports = require('./debug');
exports.init = init;
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
/**
* Colors.
*/
exports.colors = [6, 2, 3, 4, 5, 1];
/**
 * Build up the default `inspectOpts` object from the environment variables.
 *
 *   $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
 */
exports.inspectOpts = Object.keys(process.env).filter(function (key) {
  return /^debug_/i.test(key);
}).reduce(function (obj, key) {
  // camel-case the env var name (minus the "DEBUG_" prefix)
  // NOTE(review): the replace regex has no /g flag, so only the FIRST
  // underscore is camel-cased — confirm that multi-word option names
  // beyond two segments are not expected here.
  var prop = key
    .substring(6)
    .toLowerCase()
    .replace(/_([a-z])/, function (_, k) { return k.toUpperCase() });

  // coerce string value into JS value
  var val = process.env[key];
  if (/^(yes|on|true|enabled)$/i.test(val)) val = true;
  else if (/^(no|off|false|disabled)$/i.test(val)) val = false;
  else if (val === 'null') val = null;
  else val = Number(val);

  obj[prop] = val;
  return obj;
}, {});
/**
 * The file descriptor to write the `debug()` calls to.
 * Set the `DEBUG_FD` env variable to override with another value. i.e.:
 *
 *   $ DEBUG_FD=3 node script.js 3>debug.log
 */
var fd = parseInt(process.env.DEBUG_FD, 10) || 2;
if (1 !== fd && 2 !== fd) {
  util.deprecate(function(){}, 'except for stderr(2) and stdout(1), any other usage of DEBUG_FD is deprecated. Override debug.log if you want to use a different log function (https://git.io/debug_fd)')()
}
// choose the output stream once, at module load time
var stream = 1 === fd ? process.stdout :
  2 === fd ? process.stderr :
  createWritableStdioStream(fd);
/**
 * Is stdout a TTY? Colored output is enabled when `true`.
 * An explicit `colors` inspect option (set via DEBUG_COLORS) takes
 * precedence over TTY detection.
 */
function useColors() {
  if ('colors' in exports.inspectOpts) {
    return Boolean(exports.inspectOpts.colors);
  }
  return tty.isatty(fd);
}
/**
 * Map %o to `util.inspect()`, all on a single line.
 */
exports.formatters.o = function (v) {
  this.inspectOpts.colors = this.useColors;
  var inspected = util.inspect(v, this.inspectOpts);
  // collapse every newline (plus surrounding whitespace) to a single space
  return inspected.replace(/\s*\n\s*/g, ' ');
};
/**
 * Map %O to `util.inspect()`, allowing multiple lines if needed.
 */
exports.formatters.O = function (v) {
  this.inspectOpts.colors = this.useColors;
  return util.inspect(v, this.inspectOpts);
};
/**
 * Adds ANSI color escape codes if enabled; otherwise prefixes the
 * message with a UTC timestamp and the namespace.
 *
 * @api public
 */
function formatArgs(args) {
  var name = this.namespace;

  if (!this.useColors) {
    args[0] = new Date().toUTCString()
      + ' ' + name + ' ' + args[0];
    return;
  }

  var c = this.color;
  var prefix = ' \u001b[3' + c + ';1m' + name + ' ' + '\u001b[0m';
  // re-prefix every line of a multi-line message
  args[0] = prefix + args[0].split('\n').join('\n' + prefix);
  // append the "+Nms" diff badge
  args.push('\u001b[3' + c + 'm+' + exports.humanize(this.diff) + '\u001b[0m');
}
/**
 * Invokes `util.format()` with the specified arguments and writes to `stream`.
 * Returns the boolean result of `stream.write()` (node's backpressure hint).
 */
function log() {
  return stream.write(util.format.apply(util, arguments) + '\n');
}
/**
 * Save `namespaces` into the DEBUG environment variable so that child
 * debug instances created later pick it up.
 *
 * @param {String} namespaces
 * @api private
 */
function save(namespaces) {
  if (namespaces == null) {
    // If you set a process.env field to null or undefined, it gets cast to the
    // string 'null' or 'undefined'. Just delete instead.
    delete process.env.DEBUG;
  } else {
    process.env.DEBUG = namespaces;
  }
}
/**
 * Load `namespaces` from the DEBUG environment variable.
 *
 * @return {String} returns the previously persisted debug modes
 *                  (undefined when DEBUG is unset)
 * @api private
 */
function load() {
  return process.env.DEBUG;
}
/**
 * Copied from `node/src/node.js`.
 *
 * Builds a writable stream for an arbitrary file descriptor; used when
 * DEBUG_FD points at something other than stdout(1)/stderr(2).
 *
 * XXX: It's lame that node doesn't expose this API out-of-the-box. It also
 * relies on the undocumented `tty_wrap.guessHandleType()` which is also lame.
 * NOTE(review): `process.binding` is a private API that is deprecated in
 * modern Node — confirm the supported runtime range before reusing this.
 */
function createWritableStdioStream (fd) {
  var stream;
  var tty_wrap = process.binding('tty_wrap');

  // Note stream._type is used for test-module-load-list.js
  switch (tty_wrap.guessHandleType(fd)) {
    case 'TTY':
      stream = new tty.WriteStream(fd);
      stream._type = 'tty';

      // Hack to have stream not keep the event loop alive.
      // See https://github.com/joyent/node/issues/1726
      if (stream._handle && stream._handle.unref) {
        stream._handle.unref();
      }
      break;

    case 'FILE':
      var fs = require('fs');
      // NOTE(review): `fs.SyncWriteStream` is an internal class that newer
      // Node versions removed — verify against the targeted runtime.
      stream = new fs.SyncWriteStream(fd, { autoClose: false });
      stream._type = 'fs';
      break;

    case 'PIPE':
    case 'TCP':
      var net = require('net');
      stream = new net.Socket({
        fd: fd,
        readable: false,
        writable: true
      });

      // FIXME Should probably have an option in net.Socket to create a
      // stream from an existing fd which is writable only. But for now
      // we'll just add this hack and set the `readable` member to false.
      // Test: ./node test/fixtures/echo.js < /etc/passwd
      stream.readable = false;
      stream.read = null;
      stream._type = 'pipe';

      // FIXME Hack to have stream not keep the event loop alive.
      // See https://github.com/joyent/node/issues/1726
      if (stream._handle && stream._handle.unref) {
        stream._handle.unref();
      }
      break;

    default:
      // Probably an error on in uv_guess_handle()
      throw new Error('Implement me. Unknown stream file type!');
  }

  // For supporting legacy API we put the FD here.
  stream.fd = fd;
  stream._isStdio = true;

  return stream;
}
/**
 * Init logic for `debug` instances.
 *
 * Create a new `inspectOpts` object in case `useColors` is set
 * differently for a particular `debug` instance.
 */
function init (debug) {
  // NOTE(review): `util._extend` is a deprecated internal API; a later
  // modernization could use `Object.assign` — kept for legacy-node support.
  debug.inspectOpts = util._extend({}, exports.inspectOpts);
}
/**
* Enable namespaces listed in `process.env.DEBUG` initially.
*/
exports.enable(load());
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| index.js | 12.5% | (5 / 40) | 0% | (0 / 47) | 0% | (0 / 3) | 12.5% | (5 / 40) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 | 1 1 1 1 1 | 'use strict'; var hasOwn = Object.prototype.hasOwnProperty; var toStr = Object.prototype.toString; var isArray = function isArray(arr) { if (typeof Array.isArray === 'function') { return Array.isArray(arr); } return toStr.call(arr) === '[object Array]'; }; var isPlainObject = function isPlainObject(obj) { if (!obj || toStr.call(obj) !== '[object Object]') { return false; } var hasOwnConstructor = hasOwn.call(obj, 'constructor'); var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); // Not own constructor property must be Object if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { return false; } // Own properties are enumerated firstly, so to speed up, // if last one is own, then all properties are own. 
var key; for (key in obj) {/**/} return typeof key === 'undefined' || hasOwn.call(obj, key); }; module.exports = function extend() { var options, name, src, copy, copyIsArray, clone, target = arguments[0], i = 1, length = arguments.length, deep = false; // Handle a deep copy situation if (typeof target === 'boolean') { deep = target; target = arguments[1] || {}; // skip the boolean and the target i = 2; } else if ((typeof target !== 'object' && typeof target !== 'function') || target == null) { target = {}; } for (; i < length; ++i) { options = arguments[i]; // Only deal with non-null/undefined values if (options != null) { // Extend the base object for (name in options) { src = target[name]; copy = options[name]; // Prevent never-ending loop if (target !== copy) { // Recurse if we're merging plain objects or arrays if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { if (copyIsArray) { copyIsArray = false; clone = src && isArray(src) ? src : []; } else { clone = src && isPlainObject(src) ? src : {}; } // Never move original objects, clone them target[name] = extend(deep, clone, copy); // Don't bring in undefined values } else if (typeof copy !== 'undefined') { target[name] = copy; } } } } } // Return the modified object return target; }; |
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| stringify.js | 80% | (16 / 20) | 50% | (7 / 14) | 75% | (3 / 4) | 88.24% | (15 / 17) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 | 1 1 1 26 1 26 26 26 88 62 62 62 62 26 88 | exports = module.exports = stringify
exports.getSerialize = serializer
/**
 * `JSON.stringify` with cycle protection: wraps the supplied `replacer`
 * in a serializer that substitutes circular references with "[Circular]"
 * markers instead of throwing.
 */
function stringify(obj, replacer, spaces, cycleReplacer) {
  var safeReplacer = serializer(replacer, cycleReplacer)
  return JSON.stringify(obj, safeReplacer, spaces)
}
/**
 * Build a `JSON.stringify` replacer that tracks the current traversal
 * stack and replaces any circular reference via `cycleReplacer`
 * (defaulting to "[Circular ~.path]" markers), then delegates to the
 * user-supplied `replacer` if any.
 */
function serializer(replacer, cycleReplacer) {
  var seen = []
  var path = []

  if (cycleReplacer == null) {
    cycleReplacer = function (key, value) {
      if (seen[0] === value) {
        return "[Circular ~]"
      }
      return "[Circular ~." + path.slice(0, seen.indexOf(value)).join(".") + "]"
    }
  }

  return function (key, value) {
    if (seen.length === 0) {
      // root value: just remember it
      seen.push(value)
    } else {
      // pop the stack back to the current holder, or descend into it
      var thisPos = seen.indexOf(this)
      if (thisPos !== -1) {
        seen.splice(thisPos + 1)
        path.splice(thisPos, Infinity, key)
      } else {
        seen.push(this)
        path.push(key)
      }
      if (seen.indexOf(value) !== -1) {
        value = cycleReplacer.call(this, key, value)
      }
    }
    return replacer == null ? value : replacer.call(this, key, value)
  }
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| readable.js | 88.89% | (8 / 9) | 75% | (3 / 4) | 100% | (0 / 0) | 88.89% | (8 / 9) |
// Main export is the streams2 Readable implementation, with the other
// stream classes attached as properties for convenience.
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = require('stream');
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
// Escape hatch: fall back to node's built-in streams when explicitly
// requested via READABLE_STREAM=disable (never taken in browser bundles).
if (!process.browser && process.env.READABLE_STREAM === 'disable') {
  module.exports = require('stream');
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| _stream_duplex.js | 44.12% | (15 / 34) | 13.64% | (3 / 22) | 40% | (2 / 5) | 45.45% | (15 / 33) | |
| _stream_passthrough.js | 63.64% | (7 / 11) | 0% | (0 / 2) | 0% | (0 / 2) | 63.64% | (7 / 11) | |
| _stream_readable.js | 44.11% | (217 / 492) | 35.08% | (114 / 325) | 37.74% | (20 / 53) | 44.47% | (217 / 488) | |
| _stream_transform.js | 18.31% | (13 / 71) | 0% | (0 / 32) | 0% | (0 / 11) | 18.31% | (13 / 71) | |
| _stream_writable.js | 14.35% | (32 / 223) | 0% | (0 / 116) | 0% | (0 / 31) | 14.35% | (32 / 223) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 | 1 1 1 1 1 1 1 1 7 6 1 1 1 1 7 | // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // a duplex stream is just a stream that is both readable and writable. // Since JS doesn't have multiple prototypal inheritance, this class // prototypally inherits from Readable, and then parasitically from // Writable. 
module.exports = Duplex;

/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) keys.push(key);
  return keys;
}
/*</replacement>*/

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');

util.inherits(Duplex, Readable);

// Mix in every Writable prototype method that Readable did not already
// provide, since JS has no multiple prototypal inheritance.
forEach(objectKeys(Writable.prototype), function (method) {
  if (!Duplex.prototype[method])
    Duplex.prototype[method] = Writable.prototype[method];
});

/**
 * A duplex stream is just a stream that is both readable and writable;
 * it inherits prototypally from Readable and parasitically (above) from
 * Writable.
 */
function Duplex(options) {
  if (!(this instanceof Duplex))
    return new Duplex(options);

  Readable.call(this, options);
  Writable.call(this, options);

  if (options && options.readable === false)
    this.readable = false;

  if (options && options.writable === false)
    this.writable = false;

  this.allowHalfOpen = true;
  if (options && options.allowHalfOpen === false)
    this.allowHalfOpen = false;

  this.once('end', onend);
}

// the no-half-open enforcer
function onend() {
  // if we allow half-open state, or if the writable side ended,
  // then we're ok.
  if (this.allowHalfOpen || this._writableState.ended)
    return;

  // no more data can be written.
  // But allow more writes to happen in this tick.
  process.nextTick(this.end.bind(this));
}

function forEach(xs, f) {
  for (var i = 0, l = xs.length; i < l; i++) {
    f(xs[i], i);
  }
}
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 | 1 1 1 1 1 1 1 | // Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
module.exports = PassThrough;
var Transform = require('./_stream_transform');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(PassThrough, Transform);
/**
 * Minimal Transform stream: every written chunk is emitted unchanged.
 * Auto-instantiates when called without `new`.
 */
function PassThrough(options) {
  if (!(this instanceof PassThrough)) {
    return new PassThrough(options);
  }
  Transform.call(this, options);
}

// Forward each chunk straight through to the readable side.
PassThrough.prototype._transform = function (chunk, encoding, cb) {
  cb(null, chunk);
};
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 1 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 1 1 1 1 4 4 4 1 1 3 3 1 4 4 4 4 1 4 4 4 4 4 4 4 4 4 1 1 4 3 3 3 3 3 3 3 4 4 4 4 4 4 4 4 4 4 4 4 4 1 2 2 2 1 1 1 1 2 1 1 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 1 2 2 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | // Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
module.exports = Readable;

/*<replacement>*/
var isArray = require('isarray');
/*</replacement>*/

/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/

Readable.ReadableState = ReadableState;

var EE = require('events').EventEmitter;

/*<replacement>*/
// Shim EventEmitter.listenerCount for old Node versions that lack it.
if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
  return emitter.listeners(type).length;
};
/*</replacement>*/

var Stream = require('stream');

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

// Lazily loaded on first use (setEncoding / options.encoding).
var StringDecoder;

/*<replacement>*/
// Use util.debuglog('stream') when available (Node >= 0.11.3);
// otherwise debug is a no-op.
var debug = require('util');
if (debug && debug.debuglog) {
  debug = debug.debuglog('stream');
} else {
  debug = function () {};
}
/*</replacement>*/

util.inherits(Readable, Stream);
// Per-stream state for the readable side. `stream` is the owning
// Readable (or Duplex); `options` may carry highWaterMark, objectMode,
// readableObjectMode, defaultEncoding, and encoding.
function ReadableState(options, stream) {
  var Duplex = require('./_stream_duplex');

  options = options || {};

  // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  var hwm = options.highWaterMark;
  var defaultHwm = options.objectMode ? 16 : 16 * 1024;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;

  // cast to an int.
  this.highWaterMark = ~~this.highWaterMark;

  this.buffer = [];
  this.length = 0;
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = null;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;

  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;

  // whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;

  // object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away
  this.objectMode = !!options.objectMode;
  if (stream instanceof Duplex)
    this.objectMode = this.objectMode || !!options.readableObjectMode;

  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // when piping, we only care about 'readable' events that happen
  // after read()ing all the bytes and not getting any pushback.
  this.ranOut = false;

  // the number of writers that are awaiting a drain event in .pipe()s
  this.awaitDrain = 0;

  // if true, a maybeReadMore has been scheduled
  this.readingMore = false;

  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    if (!StringDecoder)
      StringDecoder = require('string_decoder/').StringDecoder;
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}
// Readable stream constructor; usable with or without `new`.
function Readable(options) {
  var Duplex = require('./_stream_duplex');

  if (!(this instanceof Readable))
    return new Readable(options);

  this._readableState = new ReadableState(options, this);

  // legacy compatibility flag
  this.readable = true;

  Stream.call(this);
}
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable.prototype.push = function(chunk, encoding) {
  var state = this._readableState;

  // Outside object mode, normalize string chunks to Buffers up front so
  // the internal buffer holds a single representation.
  if (util.isString(chunk) && !state.objectMode) {
    encoding = encoding || state.defaultEncoding;
    if (encoding !== state.encoding) {
      chunk = new Buffer(chunk, encoding);
      encoding = '';
    }
  }

  return readableAddChunk(this, state, chunk, encoding, false);
};
// Unshift should *always* be something directly out of read()
// Puts `chunk` back at the FRONT of the internal buffer so the next
// read() returns it first; no encoding conversion is performed here.
Readable.prototype.unshift = function(chunk) {
var state = this._readableState;
return readableAddChunk(this, state, chunk, '', true);
};
// Common implementation behind push() and unshift(): validate the
// chunk, route it into the buffer (front or back), emit it directly
// when the stream is flowing and empty, and report via needMoreData()
// whether the producer may push more.
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
  var er = chunkInvalid(state, chunk);
  if (er) {
    stream.emit('error', er);
  } else if (util.isNullOrUndefined(chunk)) {
    // push(null) / unshift(null) signals EOF.
    state.reading = false;
    if (!state.ended)
      onEofChunk(stream, state);
  } else if (state.objectMode || chunk && chunk.length > 0) {
    if (state.ended && !addToFront) {
      var e = new Error('stream.push() after EOF');
      stream.emit('error', e);
    } else if (state.endEmitted && addToFront) {
      var e = new Error('stream.unshift() after end event');
      stream.emit('error', e);
    } else {
      if (state.decoder && !addToFront && !encoding)
        chunk = state.decoder.write(chunk);

      if (!addToFront)
        state.reading = false;

      // if we want the data now, just emit it.
      if (state.flowing && state.length === 0 && !state.sync) {
        stream.emit('data', chunk);
        stream.read(0);
      } else {
        // update the buffer info.
        state.length += state.objectMode ? 1 : chunk.length;
        if (addToFront)
          state.buffer.unshift(chunk);
        else
          state.buffer.push(chunk);

        if (state.needReadable)
          emitReadable(stream);
      }

      maybeReadMore(stream, state);
    }
  } else if (!addToFront) {
    // Zero-length chunk in non-object mode: just clear the reading flag.
    state.reading = false;
  }

  return needMoreData(state);
}
// Decide whether the producer may push more data. True while the
// stream has not ended and either a 'readable' event is pending, the
// buffer is empty (works around hwm=0 cases such as the repl), or the
// buffer is still below the high-water mark (which read(largeNumber)
// may have raised, so more data ought to be pulled in).
function needMoreData(state) {
  if (state.ended)
    return false;
  return state.needReadable ||
         state.length === 0 ||
         state.length < state.highWaterMark;
}
// backwards compatibility.
// Attach a StringDecoder so read() returns strings decoded as `enc`
// instead of Buffers. The decoder module is loaded lazily on first use.
Readable.prototype.setEncoding = function(enc) {
if (!StringDecoder)
StringDecoder = require('string_decoder/').StringDecoder;
this._readableState.decoder = new StringDecoder(enc);
this._readableState.encoding = enc;
return this;
};
// Cap for highWaterMark growth: never raise it above 8MB (0x800000).
var MAX_HWM = 0x800000;

// Round n up to the next power of two, clamped to MAX_HWM. Used when a
// caller read()s more than the current highWaterMark, so the mark grows
// in coarse steps instead of many tiny increments.
function roundUpToNextPowerOf2(n) {
  if (n >= MAX_HWM)
    return MAX_HWM;
  // Smear the highest set bit of (n - 1) into every lower position,
  // then add one to land exactly on the next power of two.
  var v = n - 1;
  v |= v >> 1;
  v |= v >> 2;
  v |= v >> 4;
  v |= v >> 8;
  v |= v >> 16;
  return v + 1;
}
// Compute how many bytes (or objects) read(n) should actually return,
// possibly raising the highWaterMark when the caller asks for more
// than is currently buffered.
function howMuchToRead(n, state) {
  if (state.length === 0 && state.ended)
    return 0;

  // Object mode ignores byte counts: exactly one object per read.
  if (state.objectMode)
    return n === 0 ? 0 : 1;

  if (isNaN(n) || util.isNull(n)) {
    // No size given: when flowing, hand out one buffered chunk at a
    // time; otherwise drain everything.
    if (state.flowing && state.buffer.length)
      return state.buffer[0].length;
    else
      return state.length;
  }

  if (n <= 0)
    return 0;

  // If we're asking for more than the target buffer level, then raise
  // the water mark. Bump up to the next highest power of 2, to prevent
  // increasing it excessively in tiny amounts.
  if (n > state.highWaterMark)
    state.highWaterMark = roundUpToNextPowerOf2(n);

  // don't have that much. return 0 (and flag needReadable), unless
  // we've ended, in which case return whatever is left.
  if (n > state.length) {
    if (!state.ended) {
      state.needReadable = true;
      return 0;
    } else
      return state.length;
  }

  return n;
}
// you can override either this method, or the async _read(n) below.
Readable.prototype.read = function(n) {
  debug('read', n);
  var state = this._readableState;
  var nOrig = n;

  if (!util.isNumber(n) || n > 0)
    state.emittedReadable = false;

  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 &&
      state.needReadable &&
      (state.length >= state.highWaterMark || state.ended)) {
    debug('read: emitReadable', state.length, state.ended);
    if (state.length === 0 && state.ended)
      endReadable(this);
    else
      emitReadable(this);
    return null;
  }

  n = howMuchToRead(n, state);

  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    if (state.length === 0)
      endReadable(this);
    return null;
  }

  // All the actual chunk generation logic needs to be *below* the call
  // to _read. The reason is that in certain synthetic stream cases,
  // such as passthrough streams, _read may be a completely synchronous
  // operation which may change the state of the read buffer, providing
  // enough data when before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  //    a read from the buffer.
  // 2. If that resulting state will trigger a _read, then call _read.
  //    Note that this may be asynchronous or synchronous; take note if
  //    the _read call has returned yet, so we know whether it's safe to
  //    emit 'readable' etc.
  // 3. Actually pull the requested chunks out of the buffer and return.

  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;
  debug('need readable', doRead);

  // if we currently have less than the highWaterMark, then also read some
  if (state.length === 0 || state.length - n < state.highWaterMark) {
    doRead = true;
    debug('length less than watermark', doRead);
  }

  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading) {
    doRead = false;
    debug('reading or ended', doRead);
  }

  if (doRead) {
    debug('do read');
    state.reading = true;
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0)
      state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
  }

  // If _read pushed data synchronously, then `reading` will be false,
  // and we need to re-evaluate how much data we can return to the user.
  if (doRead && !state.reading)
    n = howMuchToRead(nOrig, state);

  var ret;
  if (n > 0)
    ret = fromList(n, state);
  else
    ret = null;

  if (util.isNull(ret)) {
    state.needReadable = true;
    n = 0;
  }

  state.length -= n;

  // If we have nothing in the buffer, then we want to know
  // as soon as we *do* get something into the buffer.
  if (state.length === 0 && !state.ended)
    state.needReadable = true;

  // If we tried to read() past the EOF, then emit end on the next tick.
  if (nOrig !== n && state.ended && state.length === 0)
    endReadable(this);

  if (!util.isNull(ret))
    this.emit('data', ret);

  return ret;
};
// Validate a pushed chunk: outside object mode only Buffers, strings,
// and null/undefined (EOF markers) are acceptable. Returns a TypeError
// for anything else, or null when the chunk is fine.
function chunkInvalid(state, chunk) {
  var er = null;
  if (!util.isBuffer(chunk) &&
      !util.isString(chunk) &&
      !util.isNullOrUndefined(chunk) &&
      !state.objectMode) {
    er = new TypeError('Invalid non-string/buffer chunk');
  }
  return er;
}
// Handle push(null): flush any bytes still buffered inside the string
// decoder, mark the state as ended, and emit 'readable' so consumers
// notice EOF.
function onEofChunk(stream, state) {
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length) {
state.buffer.push(chunk);
state.length += state.objectMode ? 1 : chunk.length;
}
}
state.ended = true;
// emit 'readable' now to make sure it gets picked up.
emitReadable(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
var state = stream._readableState;
state.needReadable = false;
// Emit at most once until a read() clears emittedReadable again.
if (!state.emittedReadable) {
debug('emitReadable', state.flowing);
state.emittedReadable = true;
if (state.sync)
process.nextTick(function() {
emitReadable_(stream);
});
else
emitReadable_(stream);
}
}
// Deferred half of emitReadable(): actually emit the event, then
// restart the flow loop for flowing-mode consumers.
function emitReadable_(stream) {
debug('emit readable');
stream.emit('readable');
flow(stream);
}
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore(stream, state) {
// Schedule at most one pending maybeReadMore_ pass at a time.
if (!state.readingMore) {
state.readingMore = true;
process.nextTick(function() {
maybeReadMore_(stream, state);
});
}
}
// Drain loop for maybeReadMore(): keep issuing read(0) until the buffer
// reaches the high-water mark, the stream starts flowing or ends, a
// _read is already in flight, or a read produces no new data.
function maybeReadMore_(stream, state) {
  var prevLength = state.length;
  while (!state.reading && !state.flowing && !state.ended &&
         state.length < state.highWaterMark) {
    debug('maybeReadMore read 0');
    stream.read(0);
    if (prevLength === state.length) {
      // No new data arrived; stop spinning.
      break;
    }
    prevLength = state.length;
  }
  state.readingMore = false;
}
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function(n) {
// Default implementation: subclasses MUST override this.
this.emit('error', new Error('not implemented'));
};
// Connect this stream's output to `dest`'s input. Handles multiple
// destinations, back-pressure via 'drain', and teardown on
// end/close/finish/error. Returns `dest` so pipes can be chained.
Readable.prototype.pipe = function(dest, pipeOpts) {
  var src = this;
  var state = this._readableState;

  // Destination bookkeeping: none -> single ref -> array, keeping the
  // common one-destination case allocation-free.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;
  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);

  // End the destination when the source ends, unless opted out via
  // {end: false} or the destination is stdout/stderr (never auto-close
  // those).
  var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
              dest !== process.stdout &&
              dest !== process.stderr;

  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted)
    process.nextTick(endFn);
  else
    src.once('end', endFn);

  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    debug('onunpipe');
    if (readable === src) {
      cleanup();
    }
  }

  function onend() {
    debug('onend');
    dest.end();
  }

  // when the dest drains, it reduces the awaitDrain counter
  // on the source. This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);

  function cleanup() {
    debug('cleanup');
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);
    src.removeListener('data', ondata);

    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (state.awaitDrain &&
        (!dest._writableState || dest._writableState.needDrain))
      ondrain();
  }

  src.on('data', ondata);
  function ondata(chunk) {
    debug('ondata');
    var ret = dest.write(chunk);
    if (false === ret) {
      // Destination is backed up: pause the source until it drains.
      debug('false write response, pause',
            src._readableState.awaitDrain);
      src._readableState.awaitDrain++;
      src.pause();
    }
  }

  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    debug('onerror', er);
    unpipe();
    dest.removeListener('error', onerror);
    if (EE.listenerCount(dest, 'error') === 0)
      dest.emit('error', er);
  }
  // This is a brutally ugly hack to make sure that our error handler
  // is attached before any userland ones. NEVER DO THIS.
  if (!dest._events || !dest._events.error)
    dest.on('error', onerror);
  else if (isArray(dest._events.error))
    dest._events.error.unshift(onerror);
  else
    dest._events.error = [onerror, dest._events.error];

  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    debug('onfinish');
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);

  function unpipe() {
    debug('unpipe');
    src.unpipe(dest);
  }

  // tell the dest that it's being piped to
  dest.emit('pipe', src);

  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    debug('pipe resume');
    src.resume();
  }

  return dest;
};
// Build the 'drain' handler installed on each pipe destination: it
// decrements the source's awaitDrain counter and restarts the flow once
// every blocked destination has drained (counter back at zero) and
// someone is still listening for 'data'.
function pipeOnDrain(src) {
return function() {
var state = src._readableState;
debug('pipeOnDrain', state.awaitDrain);
if (state.awaitDrain)
state.awaitDrain--;
if (state.awaitDrain === 0 && EE.listenerCount(src, 'data')) {
state.flowing = true;
flow(src);
}
};
}
// Detach `dest` from this stream, or every destination when `dest` is
// omitted. Emits 'unpipe' on each detached destination and always
// returns `this` for chaining.
Readable.prototype.unpipe = function(dest) {
  var state = this._readableState;

  // Not piping anywhere: nothing to do.
  if (state.pipesCount === 0)
    return this;

  // Single destination: the common case.
  if (state.pipesCount === 1) {
    // A specific dest was passed but it isn't the one we're piping to.
    if (dest && dest !== state.pipes)
      return this;
    if (!dest)
      dest = state.pipes;

    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }

  // Multiple destinations, none specified: detach them all.
  if (!dest) {
    var dests = state.pipes;
    var count = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;
    for (var j = 0; j < count; j++)
      dests[j].emit('unpipe', this);
    return this;
  }

  // Multiple destinations: find and remove just this one.
  var idx = indexOf(state.pipes, dest);
  if (idx === -1)
    return this;

  state.pipes.splice(idx, 1);
  state.pipesCount -= 1;
  // Collapse back to a bare reference when only one pipe remains.
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];

  dest.emit('unpipe', this);
  return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function(ev, fn) {
  var res = Stream.prototype.on.call(this, ev, fn);

  // If listening to data, and it has not explicitly been paused,
  // then call resume to start the flow of data on the next tick.
  if (ev === 'data' && false !== this._readableState.flowing) {
    this.resume();
  }

  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        // Not currently reading: schedule a read(0) to prime the pump.
        var self = this;
        process.nextTick(function() {
          debug('readable nexttick read 0');
          self.read(0);
        });
      } else if (state.length) {
        emitReadable(this, state);
      }
    }
  }

  return res;
};
Readable.prototype.addListener = Readable.prototype.on;
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function() {
  var state = this._readableState;
  if (!state.flowing) {
    debug('resume');
    state.flowing = true;
    if (!state.reading) {
      debug('resume read 0');
      this.read(0);
    }
    // Schedule the deferred resume work (emit 'resume', restart flow).
    resume(this, state);
  }
  return this;
};
// Schedule the actual resume work (resume_) for the next tick, at most
// once per tick regardless of how many times resume() is called.
function resume(stream, state) {
  if (!state.resumeScheduled) {
    state.resumeScheduled = true;
    process.nextTick(function() {
      resume_(stream, state);
    });
  }
}
// Deferred half of resume(): emit 'resume', restart the flow loop, and
// issue a read(0) if the stream is still flowing with no read in flight.
function resume_(stream, state) {
  state.resumeScheduled = false;
  stream.emit('resume');
  flow(stream);
  if (state.flowing && !state.reading)
    stream.read(0);
}
// Switch out of flowing mode and emit 'pause'. No-op when the stream
// is already explicitly paused (flowing === false).
Readable.prototype.pause = function() {
debug('call pause flowing=%j', this._readableState.flowing);
if (false !== this._readableState.flowing) {
debug('pause');
this._readableState.flowing = false;
this.emit('pause');
}
return this;
};
// While the stream is flowing, repeatedly read() chunks until the
// buffer runs dry or the stream stops flowing. read() itself emits
// 'data' for each returned chunk.
function flow(stream) {
  var state = stream._readableState;
  debug('flow', state.flowing);
  if (state.flowing) {
    do {
      var chunk = stream.read();
    } while (null !== chunk && state.flowing);
  }
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable.prototype.wrap = function(stream) {
var state = this._readableState;
var paused = false;
var self = this;
// Forward EOF: flush any decoder leftovers, then push(null).
stream.on('end', function() {
debug('wrapped end');
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length)
self.push(chunk);
}
self.push(null);
});
// Forward data, pausing the wrapped stream when push() reports
// back-pressure.
stream.on('data', function(chunk) {
debug('wrapped data');
if (state.decoder)
chunk = state.decoder.write(chunk);
if (!chunk || !state.objectMode && !chunk.length)
return;
var ret = self.push(chunk);
if (!ret) {
paused = true;
stream.pause();
}
});
// proxy all the other methods.
// important when wrapping filters and duplexes.
// (The immediately-invoked wrapper captures `i` per iteration so each
// proxy forwards to the right method.)
for (var i in stream) {
if (util.isFunction(stream[i]) && util.isUndefined(this[i])) {
this[i] = function(method) { return function() {
return stream[method].apply(stream, arguments);
}}(i);
}
}
// proxy certain important events.
var events = ['error', 'close', 'destroy', 'pause', 'resume'];
forEach(events, function(ev) {
stream.on(ev, self.emit.bind(self, ev));
});
// when we try to consume some more bytes, simply unpause the
// underlying stream.
self._read = function(n) {
debug('wrapped _read', n);
if (paused) {
paused = false;
stream.resume();
}
};
return self;
};
// exposed for testing purposes only.
// (not public API; node core's stream tests reach in through this)
Readable._fromList = fromList;
// Pluck off n bytes (or one object, in object mode) from the buffered
// chunk list. `state.length` is the combined length of all buffered
// chunks; the caller is responsible for decrementing it afterwards.
// Returns null when nothing is buffered.
function fromList(n, state) {
  var list = state.buffer;
  var length = state.length;
  var stringMode = !!state.decoder;
  var objectMode = !!state.objectMode;
  var ret;

  // nothing in the list, definitely empty.
  if (list.length === 0)
    return null;

  if (length === 0) {
    ret = null;
  } else if (objectMode) {
    // Object streams ignore n: hand out exactly one object.
    ret = list.shift();
  } else if (!n || n >= length) {
    // Consume everything buffered and empty the list.
    ret = stringMode ? list.join('') : Buffer.concat(list, length);
    list.length = 0;
  } else if (n < list[0].length) {
    // The first chunk alone covers the request; split it.
    // slice works the same way for buffers and strings.
    var head = list[0];
    ret = head.slice(0, n);
    list[0] = head.slice(n);
  } else if (n === list[0].length) {
    // The first chunk is an exact match.
    ret = list.shift();
  } else {
    // The request spans several chunks: accumulate piece by piece.
    ret = stringMode ? '' : new Buffer(n);
    var copied = 0;
    for (var i = 0, l = list.length; i < l && copied < n; i++) {
      var chunk = list[0];
      var take = Math.min(n - copied, chunk.length);
      if (stringMode)
        ret += chunk.slice(0, take);
      else
        chunk.copy(ret, copied, 0, take);
      // Trim or drop the consumed chunk.
      if (take < chunk.length)
        list[0] = chunk.slice(take);
      else
        list.shift();
      copied += take;
    }
  }

  return ret;
}
// Finish the readable side: mark the state as ended and emit 'end' on
// the next tick — unless a last-minute unshift() re-filled the buffer
// in the meantime.
function endReadable(stream) {
  var state = stream._readableState;

  // Getting here with bytes still buffered is a bug in node itself.
  if (state.length > 0)
    throw new Error('endReadable called on non-empty stream');

  if (state.endEmitted)
    return;

  state.ended = true;
  process.nextTick(function() {
    // Check that we didn't get one last unshift.
    if (!state.endEmitted && state.length === 0) {
      state.endEmitted = true;
      stream.readable = false;
      stream.emit('end');
    }
  });
}
// Minimal Array#forEach stand-in: calls f(element, index) for each
// entry of xs (avoids relying on Array.prototype extras).
function forEach (xs, f) {
  var len = xs.length;
  for (var i = 0; i < len; i++)
    f(xs[i], i);
}
// Minimal Array#indexOf stand-in using strict equality; returns -1
// when x is not found.
function indexOf (xs, x) {
  var len = xs.length;
  for (var i = 0; i < len; i++) {
    if (xs[i] === x)
      return i;
  }
  return -1;
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 | 1 1 1 1 1 1 1 1 1 1 1 1 1 | // Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
module.exports = Transform;
var Duplex = require('./_stream_duplex');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
// Transform extends Duplex: writable input side, readable output side.
util.inherits(Transform, Duplex);
// Per-stream bookkeeping for a Transform: tracks the in-flight chunk,
// its write callback, and whether a _transform call is running.
function TransformState(options, stream) {
  var host = stream;
  // Callback handed to _transform(); funnels results back into `host`.
  this.afterTransform = function(er, data) {
    return afterTransform(host, er, data);
  };
  this.needTransform = false;   // readable side wants data, none queued
  this.transforming = false;    // a _transform call is in flight
  this.writecb = null;          // pending _write callback
  this.writechunk = null;       // chunk waiting to be transformed
}
// Invoked when a _transform() call completes: clears the in-flight
// bookkeeping, pushes any produced data, runs the buffered write
// callback, and kicks off another _read when the readable side is
// still hungry.
function afterTransform(stream, er, data) {
var ts = stream._transformState;
ts.transforming = false;
var cb = ts.writecb;
// _transform's callback fired with no write pending: programmer error.
if (!cb)
return stream.emit('error', new Error('no writecb in Transform class'));
ts.writechunk = null;
ts.writecb = null;
if (!util.isNullOrUndefined(data))
stream.push(data);
if (cb)
cb(er);
var rs = stream._readableState;
rs.reading = false;
// Pull the next chunk through if more readable data is wanted.
if (rs.needReadable || rs.length < rs.highWaterMark) {
stream._read(rs.highWaterMark);
}
}
// Transform constructor; usable with or without `new`. Subclasses must
// implement _transform and may implement _flush.
function Transform(options) {
if (!(this instanceof Transform))
return new Transform(options);
Duplex.call(this, options);
this._transformState = new TransformState(options, this);
// when the writable side finishes, then flush out anything remaining.
var stream = this;
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
// Once the writable side signals 'prefinish', run _flush (when the
// subclass defined one) and then finish via done().
this.once('prefinish', function() {
if (util.isFunction(this._flush))
this._flush(function(er) {
done(stream, er);
});
else
done(stream);
});
}
// Push transformed output to the readable side. Clearing needTransform
// records that the readable side has been satisfied for now.
Transform.prototype.push = function(chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
// Default implementation always throws: subclasses MUST override.
Transform.prototype._transform = function(chunk, encoding, cb) {
throw new Error('not implemented');
};
// Writable-side hook: stash the chunk, its encoding, and the callback,
// then trigger a transform via _read if the readable side is waiting
// for data and no transform is already in flight.
Transform.prototype._write = function(chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform ||
rs.needReadable ||
rs.length < rs.highWaterMark)
this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function(n) {
var ts = this._transformState;
if (!util.isNull(ts.writechunk) && ts.writecb && !ts.transforming) {
// A chunk is queued and nothing is in flight: transform it now.
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
// Called once the writable side has finished (after _flush, if any).
// Verifies nothing is left in flight, then signals EOF on the readable
// side by pushing null. An error short-circuits straight to 'error'.
function done(stream, er) {
  if (er)
    return stream.emit('error', er);

  var ws = stream._writableState;
  var ts = stream._transformState;

  // Anything still buffered on the writable side would be silently
  // lost — that's a programming error, so throw loudly.
  if (ws.length)
    throw new Error('calling transform done when ws.length != 0');
  if (ts.transforming)
    throw new Error('calling transform done when still transforming');

  return stream.push(null);
}
|
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | // Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
// Public export: the Writable stream constructor.
module.exports = Writable;
/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/
// Expose the state class so Duplex can construct it directly.
Writable.WritableState = WritableState;
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
var Stream = require('stream');
// Writable extends the legacy Stream base class (for emit/pipe plumbing).
util.inherits(Writable, Stream);
// Represents one buffered write: the data chunk, the encoding it was
// supplied with, and the user callback to invoke once the write lands.
function WriteReq(data, enc, callback) {
  this.chunk = data;
  this.encoding = enc;
  this.callback = callback;
}
// Per-stream bookkeeping for a Writable: buffering, high-water mark,
// cork depth, in-flight write tracking and finish/prefinish flags.
// `stream` is the owning Writable (or Duplex) instance.
function WritableState(options, stream) {
  // Required lazily to avoid a circular dependency with _stream_duplex.
  var Duplex = require('./_stream_duplex');
  options = options || {};
  // the point at which write() starts returning false
  // Note: 0 is a valid value, means that we always return false if
  // the entire buffer is not flushed immediately on write()
  var hwm = options.highWaterMark;
  var defaultHwm = options.objectMode ? 16 : 16 * 1024;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
  // object stream flag to indicate whether or not this stream
  // contains buffers or objects.
  this.objectMode = !!options.objectMode;
  // Duplex streams may be objectMode on the writable side only.
  if (stream instanceof Duplex)
    this.objectMode = this.objectMode || !!options.writableObjectMode;
  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;
  this.needDrain = false;
  // at the start of calling end()
  this.ending = false;
  // when end() has been called, and returned
  this.ended = false;
  // when 'finish' is emitted
  this.finished = false;
  // should we decode strings into buffers before passing to _write?
  // this is here so that some node-core streams can optimize string
  // handling at a lower level.
  var noDecode = options.decodeStrings === false;
  this.decodeStrings = !noDecode;
  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';
  // not an actual buffer we keep track of, but a measurement
  // of how much we're waiting to get pushed to some underlying
  // socket or file.
  this.length = 0;
  // a flag to see when we're in the middle of a write.
  this.writing = false;
  // when true all writes will be buffered until .uncork() call
  this.corked = 0;
  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;
  // a flag to know if we're processing previously buffered items, which
  // may call the _write() callback in the same tick, so that we don't
  // end up in an overlapped onwrite situation.
  this.bufferProcessing = false;
  // the callback that's passed to _write(chunk,cb)
  // (a closure over `stream`, so _write implementations need no `this`)
  this.onwrite = function(er) {
    onwrite(stream, er);
  };
  // the callback that the user supplies to write(chunk,encoding,cb)
  this.writecb = null;
  // the amount that is being written when _write is called.
  this.writelen = 0;
  // FIFO of WriteReq objects waiting for their turn at _write.
  this.buffer = [];
  // number of pending user-supplied write callbacks
  // this must be 0 before 'finish' can be emitted
  this.pendingcb = 0;
  // emit prefinish if the only thing we're waiting for is _write cbs
  // This is relevant for synchronous Transform streams
  this.prefinished = false;
  // True if the error was already emitted and should not be thrown again
  this.errorEmitted = false;
}
// Writable stream constructor. Works without `new`; note the guard
// below must also admit Duplex instances, which apply this ctor.
function Writable(options) {
  // Required lazily to avoid a circular dependency with _stream_duplex.
  var Duplex = require('./_stream_duplex');
  // Writable ctor is applied to Duplexes, though they're not
  // instanceof Writable, they're instanceof Readable.
  if (!(this instanceof Writable) && !(this instanceof Duplex))
    return new Writable(options);
  this._writableState = new WritableState(options, this);
  // legacy.
  this.writable = true;
  Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
// A Writable is a sink: piping FROM one is a programmer error, so
// surface it on the stream's normal error path.
Writable.prototype.pipe = function() {
  var err = new Error('Cannot pipe. Not readable.');
  this.emit('error', err);
};
// Handles write() being called after end(): emit 'error' on the stream
// synchronously, but defer the user callback to the next tick so the
// caller never observes a reentrant callback.
function writeAfterEnd(stream, state, cb) {
  var err = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', err);
  process.nextTick(function deferred() {
    cb(err);
  });
}
// If we get something that is not a buffer, string, null, or undefined,
// and we're not in objectMode, then that's an error.
// Otherwise stream chunks are all considered to be of length=1, and the
// watermarks determine how many objects to keep in the buffer, rather than
// how many bytes or characters.
// On rejection: emits 'error' synchronously, fails `cb` on the next
// tick, and returns false so the caller skips the write.
function validChunk(stream, state, chunk, cb) {
  // Acceptable in every mode:
  if (state.objectMode ||
      util.isBuffer(chunk) ||
      util.isString(chunk) ||
      util.isNullOrUndefined(chunk))
    return true;
  var err = new TypeError('Invalid non-string/buffer chunk');
  stream.emit('error', err);
  process.nextTick(function() {
    cb(err);
  });
  return false;
}
// Queue (or immediately perform) a write of `chunk`. Returns false once
// the buffered length reaches the highWaterMark, signalling the caller
// to wait for 'drain'. `encoding` and `cb` are optional; the encoding
// argument may carry the callback (write(chunk, cb) form).
Writable.prototype.write = function(chunk, encoding, cb) {
  var state = this._writableState;
  var ret = false;
  // write(chunk, cb) form: shift arguments.
  if (util.isFunction(encoding)) {
    cb = encoding;
    encoding = null;
  }
  // Buffers carry the sentinel encoding 'buffer'; strings fall back to
  // the stream's default encoding.
  if (util.isBuffer(chunk))
    encoding = 'buffer';
  else if (!encoding)
    encoding = state.defaultEncoding;
  if (!util.isFunction(cb))
    cb = function() {};
  if (state.ended)
    writeAfterEnd(this, state, cb);
  else if (validChunk(this, state, chunk, cb)) {
    // pendingcb gates the 'finish' event; decremented in afterWrite.
    state.pendingcb++;
    ret = writeOrBuffer(this, state, chunk, encoding, cb);
  }
  return ret;
};
// Raise the cork depth; while corked (counter > 0) every write() is
// buffered instead of being handed to _write immediately. Each cork()
// must be balanced by an uncork() (or flushed by end()).
Writable.prototype.cork = function() {
  this._writableState.corked++;
};
// Lower the cork depth by one. Once fully uncorked — and provided no
// write is in flight and the buffer isn't already being drained — flush
// everything that was buffered while corked.
Writable.prototype.uncork = function() {
  var state = this._writableState;
  if (!state.corked)
    return;
  state.corked--;
  if (state.writing ||
      state.corked ||
      state.finished ||
      state.bufferProcessing ||
      !state.buffer.length)
    return;
  clearBuffer(this, state);
};
// Convert a string chunk into a Buffer ahead of _write, unless the
// stream is in objectMode or the user opted out via
// decodeStrings: false. Non-string chunks pass through untouched.
function decodeChunk(state, chunk, encoding) {
  var shouldDecode = !state.objectMode &&
                     state.decodeStrings !== false &&
                     util.isString(chunk);
  return shouldDecode ? new Buffer(chunk, encoding) : chunk;
}
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
  chunk = decodeChunk(state, chunk, encoding);
  // decodeChunk may have turned a string into a Buffer.
  if (util.isBuffer(chunk))
    encoding = 'buffer';
  // In objectMode every chunk counts as 1 against the highWaterMark.
  var len = state.objectMode ? 1 : chunk.length;
  state.length += len;
  var ret = state.length < state.highWaterMark;
  // we must ensure that previous needDrain will not be reset to false.
  if (!ret)
    state.needDrain = true;
  // Buffer while corked or while a previous _write is still in flight;
  // otherwise hand the chunk to _write right away.
  if (state.writing || state.corked)
    state.buffer.push(new WriteReq(chunk, encoding, cb));
  else
    doWrite(stream, state, false, len, chunk, encoding, cb);
  return ret;
}
// Hand one chunk (or, when `writev` is true, the whole buffered batch)
// to the underlying _write/_writev implementation. state.sync brackets
// the call so onwrite can tell whether the completion callback fired
// synchronously (and must then be deferred a tick).
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
  state.writelen = len;
  state.writecb = cb;
  state.writing = true;
  state.sync = true;
  if (writev)
    stream._writev(chunk, state.onwrite);
  else
    stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}
// Propagate a _write failure: settle the user callback (deferred to the
// next tick when the write completed synchronously, to avoid reentrancy)
// and then emit 'error' on the stream.
function onwriteError(stream, state, sync, er, cb) {
  if (sync)
    process.nextTick(function() {
      state.pendingcb--;
      cb(er);
    });
  else {
    state.pendingcb--;
    cb(er);
  }
  // Record the emission so the same error is not thrown again elsewhere.
  stream._writableState.errorEmitted = true;
  stream.emit('error', er);
}
// Reset per-write bookkeeping once a _write call has completed: clear
// the in-flight flag and callback, and release the written bytes from
// the pending length.
function onwriteStateUpdate(state) {
  var written = state.writelen;
  state.writelen = 0;
  state.writecb = null;
  state.writing = false;
  state.length -= written;
}
// Completion handler for every _write/_writev call (bound in
// WritableState.onwrite). Updates bookkeeping, drains any chunks that
// were buffered meanwhile, and settles the user callback — deferred to
// the next tick when the write finished synchronously.
function onwrite(stream, er) {
  var state = stream._writableState;
  // Capture before onwriteStateUpdate clears them.
  var sync = state.sync;
  var cb = state.writecb;
  onwriteStateUpdate(state);
  if (er)
    onwriteError(stream, state, sync, er, cb);
  else {
    // Check if we're actually ready to finish, but don't emit yet
    var finished = needFinish(stream, state);
    if (!finished &&
        !state.corked &&
        !state.bufferProcessing &&
        state.buffer.length) {
      clearBuffer(stream, state);
    }
    if (sync) {
      process.nextTick(function() {
        afterWrite(stream, state, finished, cb);
      });
    } else {
      afterWrite(stream, state, finished, cb);
    }
  }
}
// Runs after each successful write completes: maybe emit 'drain',
// settle the user callback, then check whether 'finish' can fire.
function afterWrite(stream, state, finished, cb) {
  if (!finished)
    onwriteDrain(stream, state);
  // This write's user callback is no longer pending.
  state.pendingcb--;
  cb();
  finishMaybe(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
// Clearing needDrain before emitting guarantees at most one 'drain'
// per backpressure cycle.
function onwriteDrain(stream, state) {
  if (state.length !== 0 || !state.needDrain)
    return;
  state.needDrain = false;
  stream.emit('drain');
}
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
  // Guard against re-entrant clearBuffer calls while we drain the queue
  // (onwrite may fire synchronously during doWrite).
  state.bufferProcessing = true;
  if (stream._writev && state.buffer.length > 1) {
    // Fast case, write everything using _writev()
    var cbs = [];
    for (var c = 0; c < state.buffer.length; c++)
      cbs.push(state.buffer[c].callback);
    // count the one we are adding, as well.
    // TODO(isaacs) clean this up
    state.pendingcb++;
    doWrite(stream, state, true, state.length, state.buffer, '', function(err) {
      // Settle every queued user callback with the batch's result.
      for (var i = 0; i < cbs.length; i++) {
        state.pendingcb--;
        cbs[i](err);
      }
    });
    // Clear buffer
    state.buffer = [];
  } else {
    // Slow case, write chunks one-by-one
    for (var c = 0; c < state.buffer.length; c++) {
      var entry = state.buffer[c];
      var chunk = entry.chunk;
      var encoding = entry.encoding;
      var cb = entry.callback;
      var len = state.objectMode ? 1 : chunk.length;
      doWrite(stream, state, false, len, chunk, encoding, cb);
      // if we didn't call the onwrite immediately, then
      // it means that we need to wait until it does.
      // also, that means that the chunk and cb are currently
      // being processed, so move the buffer counter past them.
      if (state.writing) {
        c++;
        break;
      }
    }
    // Keep any entries we didn't get to; reuse the array when drained.
    if (c < state.buffer.length)
      state.buffer = state.buffer.slice(c);
    else
      state.buffer.length = 0;
  }
  state.bufferProcessing = false;
}
// Default _write: concrete writable streams must override this.
// Failing via the callback (rather than throwing) keeps the error on
// the stream's normal asynchronous error path.
Writable.prototype._write = function(chunk, encoding, cb) {
  var err = new Error('not implemented');
  cb(err);
};
// No batched-write support by default; subclasses that can handle a
// whole buffered batch at once set _writev themselves.
Writable.prototype._writev = null;
// Signal that no more data will be written. Optionally writes a final
// chunk, fully uncorks, and arranges for 'finish' (and cb) once every
// buffered chunk has been flushed. chunk/encoding/cb are all optional.
Writable.prototype.end = function(chunk, encoding, cb) {
  var state = this._writableState;
  // end(cb) form: shift arguments.
  if (util.isFunction(chunk)) {
    cb = chunk;
    chunk = null;
    encoding = null;
  // end(chunk, cb) form.
  } else if (util.isFunction(encoding)) {
    cb = encoding;
    encoding = null;
  }
  if (!util.isNullOrUndefined(chunk))
    this.write(chunk, encoding);
  // .end() fully uncorks
  if (state.corked) {
    state.corked = 1;
    this.uncork();
  }
  // ignore unnecessary end() calls.
  if (!state.ending && !state.finished)
    endWritable(this, state, cb);
};
// A writable is ready to finish once end() has been called, every
// queued byte has been flushed, nothing is mid-write, and 'finish'
// has not already fired.
function needFinish(stream, state) {
  if (!state.ending || state.finished || state.writing)
    return false;
  return state.length === 0;
}
// Emit 'prefinish' at most once per stream lifetime; the flag makes
// repeated calls (from finishMaybe) harmless.
function prefinish(stream, state) {
  if (state.prefinished)
    return;
  state.prefinished = true;
  stream.emit('prefinish');
}
// Emit 'prefinish'/'finish' when the stream is ended and fully drained.
// 'finish' additionally waits until every user write callback has been
// settled (pendingcb === 0); until then only 'prefinish' fires.
// Returns whether the stream was ready to finish.
function finishMaybe(stream, state) {
  var need = needFinish(stream, state);
  if (need) {
    prefinish(stream, state);
    if (state.pendingcb === 0) {
      state.finished = true;
      stream.emit('finish');
    }
  }
  return need;
}
// Final step of end(): flag the state as ending, attempt to finish, and
// arrange for cb to run after 'finish' — on the next tick when 'finish'
// already fired, otherwise via a one-shot listener.
function endWritable(stream, state, cb) {
  state.ending = true;
  finishMaybe(stream, state);
  if (cb && state.finished)
    process.nextTick(cb);
  else if (cb)
    stream.once('finish', cb);
  state.ended = true;
}
|
| File | Statements | Branches | Functions | Lines | |||||
|---|---|---|---|---|---|---|---|---|---|
| semver.js | 48.96% | (328 / 670) | 22.13% | (81 / 366) | 45.12% | (37 / 82) | 49.55% | (327 / 660) |
| 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 
527 528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960 961 962 963 964 965 966 967 968 969 970 971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992 993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008 1009 1010 1011 1012 1013 1014 1015 1016 1017 1018 1019 1020 
1021 1022 1023 1024 1025 1026 1027 1028 1029 1030 1031 1032 1033 1034 1035 1036 1037 1038 1039 1040 1041 1042 1043 1044 1045 1046 1047 1048 1049 1050 1051 1052 1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063 1064 1065 1066 1067 1068 1069 1070 1071 1072 1073 1074 1075 1076 1077 1078 1079 1080 1081 1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098 1099 1100 1101 1102 1103 1104 1105 1106 1107 1108 1109 1110 1111 1112 1113 1114 1115 1116 1117 1118 1119 1120 1121 1122 1123 1124 1125 1126 1127 1128 1129 1130 1131 1132 1133 1134 1135 1136 1137 1138 1139 1140 1141 1142 1143 1144 1145 1146 1147 1148 1149 1150 1151 1152 1153 1154 1155 1156 1157 1158 1159 1160 1161 1162 1163 1164 1165 1166 1167 1168 1169 1170 1171 1172 1173 1174 1175 1176 1177 1178 1179 1180 1181 1182 1183 1184 1185 1186 1187 1188 1189 1190 1191 1192 1193 1194 1195 1196 1197 1198 1199 1200 1201 1202 1203 1204 1205 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 34 34 31 1 1 1 1 1 1 1 1 3 1 1 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 1 2 2 2 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 3 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 | exports = module.exports = SemVer; // The debug function is excluded entirely from the minified version. 
/* nomin */ var debug; /* nomin */ Iif (typeof process === 'object' && /* nomin */ process.env && /* nomin */ process.env.NODE_DEBUG && /* nomin */ /\bsemver\b/i.test(process.env.NODE_DEBUG)) /* nomin */ debug = function() { /* nomin */ var args = Array.prototype.slice.call(arguments, 0); /* nomin */ args.unshift('SEMVER'); /* nomin */ console.log.apply(console, args); /* nomin */ }; /* nomin */ else /* nomin */ debug = function() {}; // Note: this is the semver.org version of the spec that it implements // Not necessarily the package version of this code. exports.SEMVER_SPEC_VERSION = '2.0.0'; var MAX_LENGTH = 256; var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991; // The actual regexps go on exports.re var re = exports.re = []; var src = exports.src = []; var R = 0; // The following Regular Expressions can be used for tokenizing, // validating, and parsing SemVer version strings. // ## Numeric Identifier // A single `0`, or a non-zero digit followed by zero or more digits. var NUMERICIDENTIFIER = R++; src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'; var NUMERICIDENTIFIERLOOSE = R++; src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'; // ## Non-numeric Identifier // Zero or more digits, followed by a letter or hyphen, and then zero or // more letters, digits, or hyphens. var NONNUMERICIDENTIFIER = R++; src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'; // ## Main Version // Three dot-separated numeric identifiers. var MAINVERSION = R++; src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + '(' + src[NUMERICIDENTIFIER] + ')\\.' + '(' + src[NUMERICIDENTIFIER] + ')'; var MAINVERSIONLOOSE = R++; src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + '(' + src[NUMERICIDENTIFIERLOOSE] + ')'; // ## Pre-release Version Identifier // A numeric identifier, or a non-numeric identifier. 
var PRERELEASEIDENTIFIER = R++; src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + '|' + src[NONNUMERICIDENTIFIER] + ')'; var PRERELEASEIDENTIFIERLOOSE = R++; src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + '|' + src[NONNUMERICIDENTIFIER] + ')'; // ## Pre-release Version // Hyphen, followed by one or more dot-separated pre-release version // identifiers. var PRERELEASE = R++; src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'; var PRERELEASELOOSE = R++; src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'; // ## Build Metadata Identifier // Any combination of digits, letters, or hyphens. var BUILDIDENTIFIER = R++; src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'; // ## Build Metadata // Plus sign, followed by one or more period-separated build metadata // identifiers. var BUILD = R++; src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'; // ## Full Version String // A main version, followed optionally by a pre-release version and // build metadata. // Note that the only major, minor, patch, and pre-release sections of // the version string are capturing groups. The build metadata is not a // capturing group, because it should not ever be used in version // comparison. var FULL = R++; var FULLPLAIN = 'v?' + src[MAINVERSION] + src[PRERELEASE] + '?' + src[BUILD] + '?'; src[FULL] = '^' + FULLPLAIN + '$'; // like full, but allows v1.2.3 and =1.2.3, which people do sometimes. // also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty // common in the npm registry. var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + src[PRERELEASELOOSE] + '?' + src[BUILD] + '?'; var LOOSE = R++; src[LOOSE] = '^' + LOOSEPLAIN + '$'; var GTLT = R++; src[GTLT] = '((?:<|>)?=?)'; // Something like "2.*" or "1.2.x". 
// Note that "x.x" is a valid xRange identifer, meaning "any version" // Only the first item is strictly required. var XRANGEIDENTIFIERLOOSE = R++; src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'; var XRANGEIDENTIFIER = R++; src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'; var XRANGEPLAIN = R++; src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + '(?:' + src[PRERELEASE] + ')?' + src[BUILD] + '?' + ')?)?'; var XRANGEPLAINLOOSE = R++; src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + '(?:' + src[PRERELEASELOOSE] + ')?' + src[BUILD] + '?' + ')?)?'; var XRANGE = R++; src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'; var XRANGELOOSE = R++; src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'; // Tilde ranges. // Meaning is "reasonably at or greater than" var LONETILDE = R++; src[LONETILDE] = '(?:~>?)'; var TILDETRIM = R++; src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'; re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g'); var tildeTrimReplace = '$1~'; var TILDE = R++; src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'; var TILDELOOSE = R++; src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'; // Caret ranges. 
// Meaning is "at least and backwards compatible with" var LONECARET = R++; src[LONECARET] = '(?:\\^)'; var CARETTRIM = R++; src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'; re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g'); var caretTrimReplace = '$1^'; var CARET = R++; src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'; var CARETLOOSE = R++; src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'; // A simple gt/lt/eq thing, or just "" to indicate "any version" var COMPARATORLOOSE = R++; src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'; var COMPARATOR = R++; src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'; // An expression to strip any whitespace between the gtlt and the thing // it modifies, so that `> 1.2.3` ==> `>1.2.3` var COMPARATORTRIM = R++; src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'; // this one has to use the /g flag re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g'); var comparatorTrimReplace = '$1$2$3'; // Something like `1.2.3 - 1.2.4` // Note that these all use the loose form, because they'll be // checked against either the strict or loose comparator form // later. var HYPHENRANGE = R++; src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + '\\s+-\\s+' + '(' + src[XRANGEPLAIN] + ')' + '\\s*$'; var HYPHENRANGELOOSE = R++; src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + '\\s+-\\s+' + '(' + src[XRANGEPLAINLOOSE] + ')' + '\\s*$'; // Star ranges basically just allow anything at all. var STAR = R++; src[STAR] = '(<|>)?=?\\s*\\*'; // Compile to actual regexp objects. // All are flag-free, unless they were created above with a flag. 
for (var i = 0; i < R; i++) { debug(i, src[i]); if (!re[i]) re[i] = new RegExp(src[i]); } exports.parse = parse; function parse(version, loose) { if (version instanceof SemVer) return version; if (typeof version !== 'string') return null; if (version.length > MAX_LENGTH) return null; var r = loose ? re[LOOSE] : re[FULL]; if (!r.test(version)) return null; try { return new SemVer(version, loose); } catch (er) { return null; } } exports.valid = valid; function valid(version, loose) { var v = parse(version, loose); return v ? v.version : null; } exports.clean = clean; function clean(version, loose) { var s = parse(version.trim().replace(/^[=v]+/, ''), loose); return s ? s.version : null; } exports.SemVer = SemVer; function SemVer(version, loose) { if (version instanceof SemVer) { Eif (version.loose === loose) return version; else version = version.version; } else Iif (typeof version !== 'string') { throw new TypeError('Invalid Version: ' + version); } Iif (version.length > MAX_LENGTH) throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') Iif (!(this instanceof SemVer)) return new SemVer(version, loose); debug('SemVer', version, loose); this.loose = loose; var m = version.trim().match(loose ? re[LOOSE] : re[FULL]); Iif (!m) throw new TypeError('Invalid Version: ' + version); this.raw = version; // these are actually numbers this.major = +m[1]; this.minor = +m[2]; this.patch = +m[3]; Iif (this.major > MAX_SAFE_INTEGER || this.major < 0) throw new TypeError('Invalid major version') Iif (this.minor > MAX_SAFE_INTEGER || this.minor < 0) throw new TypeError('Invalid minor version') Iif (this.patch > MAX_SAFE_INTEGER || this.patch < 0) throw new TypeError('Invalid patch version') // numberify any prerelease numeric ids Eif (!m[4]) this.prerelease = []; else this.prerelease = m[4].split('.').map(function(id) { if (/^[0-9]+$/.test(id)) { var num = +id; if (num >= 0 && num < MAX_SAFE_INTEGER) return num; } return id; }); this.build = m[5] ? 
m[5].split('.') : []; this.format(); } SemVer.prototype.format = function() { this.version = this.major + '.' + this.minor + '.' + this.patch; Iif (this.prerelease.length) this.version += '-' + this.prerelease.join('.'); return this.version; }; SemVer.prototype.toString = function() { return this.version; }; SemVer.prototype.compare = function(other) { debug('SemVer.compare', this.version, this.loose, other); Iif (!(other instanceof SemVer)) other = new SemVer(other, this.loose); return this.compareMain(other) || this.comparePre(other); }; SemVer.prototype.compareMain = function(other) { Iif (!(other instanceof SemVer)) other = new SemVer(other, this.loose); return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch); }; SemVer.prototype.comparePre = function(other) { if (!(other instanceof SemVer)) other = new SemVer(other, this.loose); // NOT having a prerelease is > having one if (this.prerelease.length && !other.prerelease.length) return -1; else if (!this.prerelease.length && other.prerelease.length) return 1; else if (!this.prerelease.length && !other.prerelease.length) return 0; var i = 0; do { var a = this.prerelease[i]; var b = other.prerelease[i]; debug('prerelease compare', i, a, b); if (a === undefined && b === undefined) return 0; else if (b === undefined) return 1; else if (a === undefined) return -1; else if (a === b) continue; else return compareIdentifiers(a, b); } while (++i); }; // preminor will bump the version up to the next minor release, and immediately // down to pre-release. premajor and prepatch work the same way. 
SemVer.prototype.inc = function(release, identifier) { switch (release) { case 'premajor': this.prerelease.length = 0; this.patch = 0; this.minor = 0; this.major++; this.inc('pre', identifier); break; case 'preminor': this.prerelease.length = 0; this.patch = 0; this.minor++; this.inc('pre', identifier); break; case 'prepatch': // If this is already a prerelease, it will bump to the next version // drop any prereleases that might already exist, since they are not // relevant at this point. this.prerelease.length = 0; this.inc('patch', identifier); this.inc('pre', identifier); break; // If the input is a non-prerelease version, this acts the same as // prepatch. case 'prerelease': if (this.prerelease.length === 0) this.inc('patch', identifier); this.inc('pre', identifier); break; case 'major': // If this is a pre-major version, bump up to the same major version. // Otherwise increment major. // 1.0.0-5 bumps to 1.0.0 // 1.1.0 bumps to 2.0.0 if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) this.major++; this.minor = 0; this.patch = 0; this.prerelease = []; break; case 'minor': // If this is a pre-minor version, bump up to the same minor version. // Otherwise increment minor. // 1.2.0-5 bumps to 1.2.0 // 1.2.1 bumps to 1.3.0 if (this.patch !== 0 || this.prerelease.length === 0) this.minor++; this.patch = 0; this.prerelease = []; break; case 'patch': // If this is not a pre-release version, it will increment the patch. // If it is a pre-release it will bump up to the same patch version. // 1.2.0-5 patches to 1.2.0 // 1.2.0 patches to 1.2.1 if (this.prerelease.length === 0) this.patch++; this.prerelease = []; break; // This probably shouldn't be used publicly. // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
case 'pre': if (this.prerelease.length === 0) this.prerelease = [0]; else { var i = this.prerelease.length; while (--i >= 0) { if (typeof this.prerelease[i] === 'number') { this.prerelease[i]++; i = -2; } } if (i === -1) // didn't increment anything this.prerelease.push(0); } if (identifier) { // 1.2.0-beta.1 bumps to 1.2.0-beta.2, // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 if (this.prerelease[0] === identifier) { if (isNaN(this.prerelease[1])) this.prerelease = [identifier, 0]; } else this.prerelease = [identifier, 0]; } break; default: throw new Error('invalid increment argument: ' + release); } this.format(); this.raw = this.version; return this; }; exports.inc = inc; function inc(version, release, loose, identifier) { if (typeof(loose) === 'string') { identifier = loose; loose = undefined; } try { return new SemVer(version, loose).inc(release, identifier).version; } catch (er) { return null; } } exports.diff = diff; function diff(version1, version2) { if (eq(version1, version2)) { return null; } else { var v1 = parse(version1); var v2 = parse(version2); if (v1.prerelease.length || v2.prerelease.length) { for (var key in v1) { if (key === 'major' || key === 'minor' || key === 'patch') { if (v1[key] !== v2[key]) { return 'pre'+key; } } } return 'prerelease'; } for (var key in v1) { if (key === 'major' || key === 'minor' || key === 'patch') { if (v1[key] !== v2[key]) { return key; } } } } } exports.compareIdentifiers = compareIdentifiers; var numeric = /^[0-9]+$/; function compareIdentifiers(a, b) { var anum = numeric.test(a); var bnum = numeric.test(b); Eif (anum && bnum) { a = +a; b = +b; } return (anum && !bnum) ? -1 : (bnum && !anum) ? 1 : a < b ? -1 : a > b ? 
1 : 0; } exports.rcompareIdentifiers = rcompareIdentifiers; function rcompareIdentifiers(a, b) { return compareIdentifiers(b, a); } exports.major = major; function major(a, loose) { return new SemVer(a, loose).major; } exports.minor = minor; function minor(a, loose) { return new SemVer(a, loose).minor; } exports.patch = patch; function patch(a, loose) { return new SemVer(a, loose).patch; } exports.compare = compare; function compare(a, b, loose) { return new SemVer(a, loose).compare(b); } exports.compareLoose = compareLoose; function compareLoose(a, b) { return compare(a, b, true); } exports.rcompare = rcompare; function rcompare(a, b, loose) { return compare(b, a, loose); } exports.sort = sort; function sort(list, loose) { return list.sort(function(a, b) { return exports.compare(a, b, loose); }); } exports.rsort = rsort; function rsort(list, loose) { return list.sort(function(a, b) { return exports.rcompare(a, b, loose); }); } exports.gt = gt; function gt(a, b, loose) { return compare(a, b, loose) > 0; } exports.lt = lt; function lt(a, b, loose) { return compare(a, b, loose) < 0; } exports.eq = eq; function eq(a, b, loose) { return compare(a, b, loose) === 0; } exports.neq = neq; function neq(a, b, loose) { return compare(a, b, loose) !== 0; } exports.gte = gte; function gte(a, b, loose) { return compare(a, b, loose) >= 0; } exports.lte = lte; function lte(a, b, loose) { return compare(a, b, loose) <= 0; } exports.cmp = cmp; function cmp(a, op, b, loose) { var ret; switch (op) { case '===': if (typeof a === 'object') a = a.version; if (typeof b === 'object') b = b.version; ret = a === b; break; case '!==': if (typeof a === 'object') a = a.version; if (typeof b === 'object') b = b.version; ret = a !== b; break; case '': case '=': case '==': ret = eq(a, b, loose); break; case '!=': ret = neq(a, b, loose); break; case '>': ret = gt(a, b, loose); break; case '>=': ret = gte(a, b, loose); break; case '<': ret = lt(a, b, loose); break; case '<=': ret = lte(a, b, loose); 
break; default: throw new TypeError('Invalid operator: ' + op); } return ret; } exports.Comparator = Comparator; function Comparator(comp, loose) { Iif (comp instanceof Comparator) { if (comp.loose === loose) return comp; else comp = comp.value; } Iif (!(this instanceof Comparator)) return new Comparator(comp, loose); debug('comparator', comp, loose); this.loose = loose; this.parse(comp); Iif (this.semver === ANY) this.value = ''; else this.value = this.operator + this.semver.version; debug('comp', this); } var ANY = {}; Comparator.prototype.parse = function(comp) { var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; var m = comp.match(r); Iif (!m) throw new TypeError('Invalid comparator: ' + comp); this.operator = m[1]; Iif (this.operator === '=') this.operator = ''; // if it literally is just '>' or '' then allow anything. Iif (!m[2]) this.semver = ANY; else this.semver = new SemVer(m[2], this.loose); }; Comparator.prototype.toString = function() { return this.value; }; Comparator.prototype.test = function(version) { debug('Comparator.test', version, this.loose); Iif (this.semver === ANY) return true; Iif (typeof version === 'string') version = new SemVer(version, this.loose); return cmp(version, this.operator, this.semver, this.loose); }; exports.Range = Range; function Range(range, loose) { Iif ((range instanceof Range) && range.loose === loose) return range; Iif (!(this instanceof Range)) return new Range(range, loose); this.loose = loose; // First, split based on boolean or || this.raw = range; this.set = range.split(/\s*\|\|\s*/).map(function(range) { return this.parseRange(range.trim()); }, this).filter(function(c) { // throw out any that are not relevant for whatever reason return c.length; }); Iif (!this.set.length) { throw new TypeError('Invalid SemVer Range: ' + range); } this.format(); } Range.prototype.format = function() { this.range = this.set.map(function(comps) { return comps.join(' ').trim(); }).join('||').trim(); return this.range; }; 
Range.prototype.toString = function() { return this.range; }; Range.prototype.parseRange = function(range) { var loose = this.loose; range = range.trim(); debug('range', range, loose); // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]; range = range.replace(hr, hyphenReplace); debug('hyphen replace', range); // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace); debug('comparator trim', range, re[COMPARATORTRIM]); // `~ 1.2.3` => `~1.2.3` range = range.replace(re[TILDETRIM], tildeTrimReplace); // `^ 1.2.3` => `^1.2.3` range = range.replace(re[CARETTRIM], caretTrimReplace); // normalize spaces range = range.split(/\s+/).join(' '); // At this point, the range is completely trimmed and // ready to be split into comparators. var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; var set = range.split(' ').map(function(comp) { return parseComparator(comp, loose); }).join(' ').split(/\s+/); Iif (this.loose) { // in loose mode, throw out any that are not valid comparators set = set.filter(function(comp) { return !!comp.match(compRe); }); } set = set.map(function(comp) { return new Comparator(comp, loose); }); return set; }; // Mostly just for testing and legacy API reasons exports.toComparators = toComparators; function toComparators(range, loose) { return new Range(range, loose).set.map(function(comp) { return comp.map(function(c) { return c.value; }).join(' ').trim().split(' '); }); } // comprised of xranges, tildes, stars, and gtlt's at this point. // already replaced the hyphen ranges // turn into a set of JUST comparators. 
// Desugar one comparator-ish token (caret, tilde, x-range, star) into
// plain comparator text.
// FIX: restored plain 'if'/'else if' where Istanbul coverage markers
// ('Iif'/'Eif') had leaked into this source text, and dropped a stray
// table-pipe character that trailed the end of the chunk.
function parseComparator(comp, loose) {
  debug('comp', comp);
  comp = replaceCarets(comp, loose);
  debug('caret', comp);
  comp = replaceTildes(comp, loose);
  debug('tildes', comp);
  comp = replaceXRanges(comp, loose);
  debug('xrange', comp);
  comp = replaceStars(comp, loose);
  debug('stars', comp);
  return comp;
}

// Is this version-part a wildcard ('x', 'X', '*', or absent)?
function isX(id) {
  return !id || id.toLowerCase() === 'x' || id === '*';
}

// ~, ~> --> * (any, kinda silly)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
function replaceTildes(comp, loose) {
  return comp.trim().split(/\s+/).map(function(comp) {
    return replaceTilde(comp, loose);
  }).join(' ');
}

function replaceTilde(comp, loose) {
  var r = loose ? re[TILDELOOSE] : re[TILDE];
  return comp.replace(r, function(_, M, m, p, pr) {
    debug('tilde', comp, _, M, m, p, pr);
    var ret;
    if (isX(M))
      ret = '';
    else if (isX(m))
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
    else if (isX(p))
      // ~1.2 == >=1.2.0 <1.3.0
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
    else if (pr) {
      debug('replaceTilde pr', pr);
      if (pr.charAt(0) !== '-') pr = '-' + pr;
      ret = '>=' + M + '.' + m + '.' + p + pr +
            ' <' + M + '.' + (+m + 1) + '.0';
    } else
      // ~1.2.3 == >=1.2.3 <1.3.0
      ret = '>=' + M + '.' + m + '.' + p +
            ' <' + M + '.' + (+m + 1) + '.0';
    debug('tilde return', ret);
    return ret;
  });
}

// ^ --> * (any, kinda silly)
// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
// ^1.2.3 --> >=1.2.3 <2.0.0
// ^1.2.0 --> >=1.2.0 <2.0.0
function replaceCarets(comp, loose) {
  return comp.trim().split(/\s+/).map(function(comp) {
    return replaceCaret(comp, loose);
  }).join(' ');
}

function replaceCaret(comp, loose) {
  debug('caret', comp, loose);
  var r = loose ? re[CARETLOOSE] : re[CARET];
  return comp.replace(r, function(_, M, m, p, pr) {
    debug('caret', comp, _, M, m, p, pr);
    var ret;
    if (isX(M))
      ret = '';
    else if (isX(m))
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
    else if (isX(p)) {
      if (M === '0')
        ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
      else
        ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0';
    } else if (pr) {
      debug('replaceCaret pr', pr);
      if (pr.charAt(0) !== '-') pr = '-' + pr;
      if (M === '0') {
        if (m === '0')
          ret = '>=' + M + '.' + m + '.' + p + pr +
                ' <' + M + '.' + m + '.' + (+p + 1);
        else
          ret = '>=' + M + '.' + m + '.' + p + pr +
                ' <' + M + '.' + (+m + 1) + '.0';
      } else
        ret = '>=' + M + '.' + m + '.' + p + pr +
              ' <' + (+M + 1) + '.0.0';
    } else {
      debug('no pr');
      if (M === '0') {
        if (m === '0')
          ret = '>=' + M + '.' + m + '.' + p +
                ' <' + M + '.' + m + '.' + (+p + 1);
        else
          ret = '>=' + M + '.' + m + '.' + p +
                ' <' + M + '.' + (+m + 1) + '.0';
      } else
        ret = '>=' + M + '.' + m + '.' + p +
              ' <' + (+M + 1) + '.0.0';
    }
    debug('caret return', ret);
    return ret;
  });
}

function replaceXRanges(comp, loose) {
  debug('replaceXRanges', comp, loose);
  return comp.split(/\s+/).map(function(comp) {
    return replaceXRange(comp, loose);
  }).join(' ');
}

function replaceXRange(comp, loose) {
  comp = comp.trim();
  var r = loose ? re[XRANGELOOSE] : re[XRANGE];
  return comp.replace(r, function(ret, gtlt, M, m, p, pr) {
    debug('xRange', comp, ret, gtlt, M, m, p, pr);
    var xM = isX(M);
    var xm = xM || isX(m);
    var xp = xm || isX(p);
    var anyX = xp;
    if (gtlt === '=' && anyX)
      gtlt = '';
    if (xM) {
      if (gtlt === '>' || gtlt === '<') {
        // nothing is allowed
        ret = '<0.0.0';
      } else {
        // nothing is forbidden
        ret = '*';
      }
    } else if (gtlt && anyX) {
      // replace X with 0
      if (xm) m = 0;
      if (xp) p = 0;
      if (gtlt === '>') {
        // >1 => >=2.0.0
        // >1.2 => >=1.3.0
        // >1.2.3 => >= 1.2.4
        gtlt = '>=';
        if (xm) {
          M = +M + 1;
          m = 0;
          p = 0;
        } else if (xp) {
          m = +m + 1;
          p = 0;
        }
      } else if (gtlt === '<=') {
        // <=0.7.x is actually <0.8.0, since any 0.7.x should
        // pass. Similarly, <=7.x is actually <8.0.0, etc.
        gtlt = '<';
        if (xm) M = +M + 1;
        else m = +m + 1;
      }
      ret = gtlt + M + '.' + m + '.' + p;
    } else if (xm) {
      ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
    } else if (xp) {
      ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
    }
    debug('xRange return', ret);
    return ret;
  });
}

// Because * is AND-ed with everything else in the comparator,
// and '' means "any version", just remove the *s entirely.
function replaceStars(comp, loose) {
  debug('replaceStars', comp, loose);
  // Looseness is ignored here. star is always as loose as it gets!
  return comp.trim().replace(re[STAR], '');
}

// This function is passed to string.replace(re[HYPHENRANGE])
// M, m, patch, prerelease, build
// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
// 1.2 - 3.4 => >=1.2.0 <3.5.0
function hyphenReplace($0,
                       from, fM, fm, fp, fpr, fb,
                       to, tM, tm, tp, tpr, tb) {
  if (isX(fM))
    from = '';
  else if (isX(fm))
    from = '>=' + fM + '.0.0';
  else if (isX(fp))
    from = '>=' + fM + '.' + fm + '.0';
  else
    from = '>=' + from;

  if (isX(tM))
    to = '';
  else if (isX(tm))
    to = '<' + (+tM + 1) + '.0.0';
  else if (isX(tp))
    to = '<' + tM + '.' + (+tm + 1) + '.0';
  else if (tpr)
    to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr;
  else
    to = '<=' + to;

  return (from + ' ' + to).trim();
}

// if ANY of the sets match ALL of its comparators, then pass
Range.prototype.test = function(version) {
  if (!version) return false;
  if (typeof version === 'string') version = new SemVer(version, this.loose);
  for (var i = 0; i < this.set.length; i++) {
    if (testSet(this.set[i], version)) return true;
  }
  return false;
};

// Does `version` satisfy every comparator in `set`?
function testSet(set, version) {
  for (var i = 0; i < set.length; i++) {
    if (!set[i].test(version)) return false;
  }
  if (version.prerelease.length) {
    // Find the set of versions that are allowed to have prereleases
    // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
    // That should allow `1.2.3-pr.2` to pass.
    // However, `1.2.4-alpha.notready` should NOT be allowed,
    // even though it's within the range set by the comparators.
    for (var i = 0; i < set.length; i++) {
      debug(set[i].semver);
      if (set[i].semver === ANY) continue;
      if (set[i].semver.prerelease.length > 0) {
        var allowed = set[i].semver;
        if (allowed.major === version.major &&
            allowed.minor === version.minor &&
            allowed.patch === version.patch)
          return true;
      }
    }
    // Version has a -pre, but it's not one of the ones we like.
    return false;
  }
  return true;
}

exports.satisfies = satisfies;
// True when `version` satisfies `range`; false (never throws) when the
// range itself is invalid.
function satisfies(version, range, loose) {
  try {
    range = new Range(range, loose);
  } catch (er) {
    return false;
  }
  return range.test(version);
}

exports.maxSatisfying = maxSatisfying;
// Highest version in `versions` that satisfies `range`, or null.
function maxSatisfying(versions, range, loose) {
  return versions.filter(function(version) {
    return satisfies(version, range, loose);
  }).sort(function(a, b) {
    return rcompare(a, b, loose);
  })[0] || null;
}

exports.minSatisfying = minSatisfying;
// Lowest version in `versions` that satisfies `range`, or null.
function minSatisfying(versions, range, loose) {
  return versions.filter(function(version) {
    return satisfies(version, range, loose);
  }).sort(function(a, b) {
    return compare(a, b, loose);
  })[0] || null;
}

exports.validRange = validRange;
// Canonical form of `range`, or null if it doesn't parse.
function validRange(range, loose) {
  try {
    // Return '*' instead of '' so that truthiness works.
    // This will throw if it's invalid anyway
    return new Range(range, loose).range || '*';
  } catch (er) {
    return null;
  }
}

// Determine if version is less than all the versions possible in the range
exports.ltr = ltr;
function ltr(version, range, loose) {
  return outside(version, range, '<', loose);
}

// Determine if version is greater than all the versions possible in the range.
exports.gtr = gtr;
function gtr(version, range, loose) {
  return outside(version, range, '>', loose);
}

exports.outside = outside;
// Is `version` entirely outside `range` on the `hilo` side ('>' or '<')?
// Throws TypeError for any other hilo value.
function outside(version, range, hilo, loose) {
  version = new SemVer(version, loose);
  range = new Range(range, loose);

  var gtfn, ltefn, ltfn, comp, ecomp;
  switch (hilo) {
    case '>':
      gtfn = gt;
      ltefn = lte;
      ltfn = lt;
      comp = '>';
      ecomp = '>=';
      break;
    case '<':
      gtfn = lt;
      ltefn = gte;
      ltfn = gt;
      comp = '<';
      ecomp = '<=';
      break;
    default:
      throw new TypeError('Must provide a hilo val of "<" or ">"');
  }

  // If it satisfies the range it is not outside
  if (satisfies(version, range, loose)) {
    return false;
  }

  // From now on, variable terms are as if we're in "gtr" mode.
  // but note that everything is flipped for the "ltr" function.
  for (var i = 0; i < range.set.length; ++i) {
    var comparators = range.set[i];
    var high = null;
    var low = null;
    comparators.forEach(function(comparator) {
      if (comparator.semver === ANY) {
        comparator = new Comparator('>=0.0.0');
      }
      high = high || comparator;
      low = low || comparator;
      if (gtfn(comparator.semver, high.semver, loose)) {
        high = comparator;
      } else if (ltfn(comparator.semver, low.semver, loose)) {
        low = comparator;
      }
    });

    // If the edge version comparator has a operator then our version
    // isn't outside it
    if (high.operator === comp || high.operator === ecomp) {
      return false;
    }

    // If the lowest version comparator has an operator and our version
    // is less than it then it isn't higher than the range
    if ((!low.operator || low.operator === comp) &&
        ltefn(version, low.semver)) {
      return false;
    } else if (low.operator === ecomp && ltfn(version, low.semver)) {
      return false;
    }
  }
  return true;
}

exports.prerelease = prerelease;
// Prerelease components of `version`, or null if none / unparseable.
function prerelease(version, loose) {
  var parsed = parse(version, loose);
  return (parsed && parsed.prerelease.length) ? parsed.prerelease : null;
}